/*
  Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>
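/*
  Mat_MPIAdj stores the local rows in standard CSR (compressed sparse row) form:
  for local row r the global column indices are j[i[r]] .. j[i[r+1]-1], with the
  optional integer edge weights in values[] parallel to j[]. An illustrative
  sketch (a made-up graph, not taken from this file): a 3-vertex graph whose
  rows have neighbors {1,2}, {2}, and {0,1} would be stored as

     i = {0, 2, 3, 5}
     j = {1, 2, 2, 0, 1}
*/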
/*
  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv = NULL;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues = NULL;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* set up a receive buffer for the edge weights if they are in use */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
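/*
  A minimal sketch of the PetscSF pattern used twice in the routine above
  (illustrative only; nroots, owner_rank, root_index, rootdata, and leafdata
  are placeholders): every leaf names a (rank, index) pair identifying a root
  on some process, and PetscSFBcast copies each root value to its leaves.

    PetscSF     sf;
    PetscSFNode remote = {owner_rank, root_index};
    PetscCall(PetscSFCreate(comm, &sf));
    PetscCall(PetscSFSetGraph(sf, nroots, 1, NULL, PETSC_OWN_POINTER, &remote, PETSC_COPY_VALUES));
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFDestroy(&sf));
*/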
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /* Estimate a maximum number for allocating memory */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /* get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
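/*
  Illustrative usage of the sequential extraction path above (a sketch; the
  index set contents are made-up): each process pulls one sequential sub-graph
  of a parallel MATMPIADJ matrix A onto PETSC_COMM_SELF.

    IS       rows, cols;
    Mat     *submats;
    PetscInt idx[3] = {0, 2, 3}; // hypothetical global rows/columns to keep
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 3, idx, PETSC_COPY_VALUES, &rows));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 3, idx, PETSC_COPY_VALUES, &cols));
    PetscCall(MatCreateSubMatrices(A, 1, &rows, &cols, MAT_INITIAL_MATRIX, &submats));
*/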
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
{
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(PETSC_SUCCESS);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
{
  PetscBool isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
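/*
  For a matrix without edge weights the viewer above prints one line per local
  row listing its column indices; with weights it prints (column, weight)
  pairs. Hypothetical output for a 3-row unweighted matrix:

    row 0: 1 2
    row 1: 2
    row 2: 0 1
*/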
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD:
    a->symmetric = flg;
    break;
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
  PetscBool   flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or numbers of nonzeros differ, the matrices are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise both the a->i and the a->j arrays must match */
    PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
    if (flag) PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));
  }
  PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                B;
  PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, NULL, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

  /* count the number of nonzeros per row */
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
    for (j = 0; j < len; j++) {
      if (rj[j] == i + rstart) {
        len--;
        break;
      } /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros + 1, &a));
  PetscCall(PetscMalloc1(N + 1, &ia));
  PetscCall(PetscMalloc1(nzeros + 1, &ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
    cnt = 0;
    for (j = 0; j < len; j++) {
      if (rj[j] != i + rstart) { /* if not diagonal */
        a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
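/*
  The conversion above is reached through MatConvert(); a sketch (assuming an
  assembled matrix A of some other type): the diagonal is dropped and |a_ij|,
  truncated to an integer, becomes the edge weight.

    Mat adj;
    PetscCall(MatConvert(A, MATMPIADJ, MAT_INITIAL_MATRIX, &adj));
*/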
static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* drop negative and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  while (!PetscHashIterAtEnd(ht, hi)) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  while (!PetscHashIterAtEnd(ht, hi)) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  for (i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       NULL,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL};
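/*
  A sketch of the incremental assembly path implemented by MatSetValues_MPIAdj()
  and MatAssemblyEnd_MPIAdj() above (illustrative entries): inserted entries are
  recorded as edges in a hash set, numerical values are ignored, and diagonal
  entries are dropped.

    Mat A;
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 4, 4));
    PetscCall(MatSetType(A, MATMPIADJ));
    PetscCall(MatSetValue(A, 0, 1, 0.0, INSERT_VALUES)); // edge 0 -> 1
    PetscCall(MatSetValue(A, 1, 0, 0.0, INSERT_VALUES)); // edge 1 -> 0
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/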
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* make sure everybody knows whether edge weights are in use */
  PetscCallMPI(MPIU_Allreduce(&useedgeweights, &b->useedgeweights, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  const PetscInt *ranges;
  MPI_Comm        acomm, bcomm;
  MPI_Group       agroup, bgroup;
  PetscMPIInt     i, rank, size, nranks, *ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
  PetscCallMPI(MPI_Comm_size(acomm, &size));
  PetscCallMPI(MPI_Comm_rank(acomm, &rank));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscMalloc1(nranks, &ranks));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm, &agroup));
  PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt    m, N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, NULL, &N));
    PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
    b          = (Mat_MPIAdj *)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows

  Collective

  Input Parameter:
. A - original `MATMPIADJ` matrix

  Output Parameter:
. B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`

  Level: developer

  Note:
  The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.

  Developer Note:
  This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
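/*
  Illustrative usage (a sketch): shrink the communicator before handing the
  graph to a partitioner that requires every process to own rows. Note that
  B shares the arrays of A and must be destroyed before A.

    Mat B;
    PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &B));
    if (B) {
      // ... partition B on the smaller communicator ...
      PetscCall(MatDestroy(&B));
    }
*/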
/*MC
  MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
  intended for use constructing orderings and partitionings.

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`

.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data         = (void *)b;
  B->ops[0]       = MatOps_Values;
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on all processes

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on rank zero, not set on other ranks

  Level: intermediate

  Note:
  This routine has an advantage on systems with multiple MPI ranks per node because only one copy of the matrix
  is stored on rank zero, instead of one copy per rank. This allows much larger parallel graphs to be
  partitioned sequentially.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
@*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
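/*
  Illustrative usage of the two gathering routines above (a sketch): collect
  the distributed graph on rank zero only and partition it there sequentially.

    Mat seqA = NULL;
    PetscCall(MatMPIAdjToSeqRankZero(A, &seqA));
    if (seqA) { // only rank zero receives the gathered matrix
      // ... run a sequential partitioner on seqA ...
      PetscCall(MatDestroy(&seqA));
    }
*/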
/*@
  MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

  Logically Collective

  Input Parameters:
+ B      - the matrix
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row)
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
  The matrix need not have numerical values associated with it; it is
  intended for ordering (to reduce bandwidth etc) and partitioning.

  Collective

  Input Parameters:
+ comm   - MPI communicator
. m      - number of local rows
. N      - number of global columns
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row)
- values - the values, optional, use `NULL` if not provided

  Output Parameter:
. A - the matrix

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonals.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the arrays `i`, `j`, and `values` must be retained by the user until `A` is destroyed

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt i[], PetscInt j[], PetscInt values[], Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
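/*
  A minimal end-to-end sketch (illustrative, run on one process): build the
  3-vertex path graph 0-1-2 as a MATMPIADJ. The i/j arrays are obtained with
  PetscMalloc1() because the matrix takes ownership of them.

    Mat       A;
    PetscInt *ii, *jj;
    PetscCall(PetscMalloc1(4, &ii));
    PetscCall(PetscMalloc1(4, &jj));
    ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4; // row starts
    jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1; // neighbors, sorted per row
    PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, 3, 3, ii, jj, NULL, &A));
    PetscCall(MatDestroy(&A));
*/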