/*
  Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>

/*
  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct the SF graph: one leaf per requested row, rooted at the process that owns that row */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* also gather the edge weights if the matrix carries them */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
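  /* The second SF has one leaf per nonzero of the requested rows; broadcast the column
     indices (and the edge weights, if present) from their owners into the leaf ordering */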
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
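
/*
  The routine above uses a common two-phase PetscSF pattern: first fetch the lengths of an arbitrary
  set of global rows from their owners, then fetch the row contents. A minimal sketch of the first
  phase; the names nwanted, wantedrows, nlocalrows, ncols_local and rowlens are illustrative and not
  part of this file:

    PetscSF      sf;
    PetscSFNode *remote;
    PetscInt    *rowlens, lr;
    PetscMPIInt  owner;

    PetscCall(PetscMalloc1(nwanted, &remote));
    for (PetscInt k = 0; k < nwanted; k++) { // wantedrows[] holds global row numbers
      PetscCall(PetscLayoutFindOwnerIndex(rmap, wantedrows[k], &owner, &lr));
      remote[k].rank  = owner;
      remote[k].index = lr;
    }
    PetscCall(PetscSFCreate(comm, &sf));
    PetscCall(PetscSFSetGraph(sf, nlocalrows, nwanted, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER));
    PetscCall(PetscMalloc1(nwanted, &rowlens));
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_local, rowlens, MPI_REPLACE)); // ncols_local[r] = length of local row r
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_local, rowlens, MPI_REPLACE));
    PetscCall(PetscSFDestroy(&sf));
*/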

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /* get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
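
/*
  Users reach the code above through the generic MatCreateSubMatrices() interface. A minimal usage
  sketch; the index sets rowis and colis are illustrative:

    IS   rowis, colis;
    Mat *submats;

    PetscCall(ISCreateStride(PETSC_COMM_SELF, 5, 0, 1, &rowis)); // global rows 0..4
    PetscCall(ISCreateStride(PETSC_COMM_SELF, 5, 0, 1, &colis)); // global cols 0..4
    PetscCall(MatCreateSubMatrices(A, 1, &rowis, &colis, MAT_INITIAL_MATRIX, &submats));
    // submats[0] is a MATMPIADJ on PETSC_COMM_SELF holding the requested block
    PetscCall(MatDestroySubMatrices(1, &submats));
    PetscCall(ISDestroy(&rowis));
    PetscCall(ISDestroy(&colis));
*/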

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
{
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(PETSC_SUCCESS);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL:
    break;
  default:
    PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
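
/*
  MatSetOption_MPIAdj() above only records the symmetry-related options; everything else is ignored
  with an informational message. A typical call for an undirected graph:

    PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
*/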

static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
  PetscBool   flag;

  PetscFunctionBegin;
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    /* the local dimensions or the number of nonzeros differ */
    flag = PETSC_FALSE;
  } else {
    /* compare the row offsets a->i, then the column indices a->j */
    PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
    if (flag) PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));
  }
  PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
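
/*
  MatGetRowIJ()/MatRestoreRowIJ() above expose the raw CSR arrays of the local rows, optionally
  shifted to 1-based indexing for Fortran-oriented packages. A minimal sketch of reading them:

    PetscInt        nrows;
    const PetscInt *ia, *ja;
    PetscBool       done;

    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
    if (done) {
      for (PetscInt r = 0; r < nrows; r++) {
        // the columns of local row r are ja[ia[r]] .. ja[ia[r + 1] - 1]
      }
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &done));
*/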

static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                B;
  PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, NULL, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

  /* count the number of nonzeros per row */
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
    for (j = 0; j < len; j++) {
      if (rj[j] == i + rstart) {
        len--;
        break;
      } /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros + 1, &a));
  PetscCall(PetscMalloc1(N + 1, &ia));
  PetscCall(PetscMalloc1(nzeros + 1, &ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
    cnt = 0;
    for (j = 0; j < len; j++) {
      if (rj[j] != i + rstart) { /* if not diagonal */
        a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
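
/*
  MatSetValues_MPIAdj() and MatAssemblyBegin_MPIAdj() above, together with MatAssemblyEnd_MPIAdj()
  below, let a MATMPIADJ matrix be built edge by edge instead of from preallocated CSR arrays. A
  minimal sketch, assuming edges[] holds (row, col) pairs in global numbering and nlocal/N are the
  local and global vertex counts (illustrative names):

    Mat A;

    PetscCall(MatCreate(comm, &A));
    PetscCall(MatSetSizes(A, nlocal, PETSC_DECIDE, PETSC_DETERMINE, N));
    PetscCall(MatSetType(A, MATMPIADJ));
    for (PetscInt e = 0; e < nedges; e++) PetscCall(MatSetValue(A, edges[2 * e], edges[2 * e + 1], 1.0, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/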

static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL};

static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* Make sure everybody knows whether edge weights are being used */
  PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(PETSC_SUCCESS);
}
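
/*
  The i[]/j[] arrays handed to MatMPIAdjSetPreallocation() are standard CSR row offsets and column
  indices with the diagonal omitted. For example, a single process owning the 4-vertex path graph
  0-1-2-3 would pass

    i[] = {0, 1, 3, 5, 6}
    j[] = {1, 0, 2, 1, 3, 2}

  Both arrays must be obtained with PetscMalloc(); the matrix takes ownership and frees them when it
  is destroyed.
*/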

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  const PetscInt *ranges;
  MPI_Comm        acomm, bcomm;
  MPI_Group       agroup, bgroup;
  PetscMPIInt     i, rank, size, nranks, *ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
  PetscCallMPI(MPI_Comm_size(acomm, &size));
  PetscCallMPI(MPI_Comm_rank(acomm, &rank));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscMalloc1(nranks, &ranks));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm, &agroup));
  PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt    m, N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, NULL, &N));
    PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
    b          = (Mat_MPIAdj *)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows

  Collective

  Input Parameter:
. A - original `MATMPIADJ` matrix

  Output Parameter:
. B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`

  Level: developer

  Note:
  The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.

  Developer Note:
  This function is mostly intended for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
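
/*
  Typical use of MatMPIAdjCreateNonemptySubcommMat() inside a partitioner wrapper (sketch):

    Mat Asub;

    PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &Asub));
    if (Asub) {
      // every rank in Asub's communicator owns at least one row, as required by e.g. ParMETIS
      PetscCall(MatDestroy(&Asub)); // destroy Asub before A; the CSR arrays are shared
    }
*/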

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`

.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data         = (void *)b;
  B->ops[0]       = MatOps_Values;
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on all processes

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on rank zero (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on rank zero, not set on other ranks

  Level: intermediate

  Note:
  This routine has an advantage on systems with multiple ranks per node since only one copy of the matrix
  is stored on the first node, instead of one copy per rank. This allows much larger parallel graphs to be
  partitioned sequentially.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
@*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
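
/*
  Sketch of feeding a sequential partitioner with the two gathering routines above:

    Mat         whole = NULL;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
    PetscCall(MatMPIAdjToSeqRankZero(A, &whole)); // whole is only created on rank 0
    if (rank == 0) {
      // run the sequential partitioning package on the gathered graph, then broadcast the result
      PetscCall(MatDestroy(&whole));
    }
*/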

/*@
  MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

  Logically Collective

  Input Parameters:
+ B      - the matrix
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row).
           The indices in `i` and `j` start with zero (NOT with one).
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
  The matrix need not have numerical values associated with it, it is
  intended for ordering (to reduce bandwidth etc) and partitioning.

  Collective

  Input Parameters:
+ comm   - MPI communicator
. m      - number of local rows
. N      - number of global columns
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row).
- values - the values, optional, use `NULL` if not provided

  Output Parameter:
. A - the matrix

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonals.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the arrays `i`, `j`, and `values` must be retained by the user until `A` is destroyed

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
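
/*
  End-to-end sketch of creating a MATMPIADJ matrix with MatCreateMPIAdj(); nlocalrows, nlocalnz and N
  are illustrative placeholders for the local row count, local nonzero count and global column count:

    PetscInt *i, *j;
    Mat       adj;

    PetscCall(PetscMalloc1(nlocalrows + 1, &i));
    PetscCall(PetscMalloc1(nlocalnz, &j));
    // ... fill i[] and j[] with the 0-based CSR description of the local rows, omitting the diagonal ...
    PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, nlocalrows, N, i, j, NULL, &adj));
    // i and j now belong to the matrix and will be freed when it is destroyed
    PetscCall(MatDestroy(&adj));
*/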