/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>

/*
  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)

  Gathers the rows of `adj` listed in `irows` onto this process (two PetscSF rounds:
  first the per-row column counts and offsets, then the column indices themselves),
  keeps only the columns listed in `icols`, and returns the result as freshly
  allocated CSR arrays. Any of the output pointers may be NULL, in which case the
  corresponding array is freed instead of returned; *sadj_values is set to NULL
  when the matrix carries no edge weights.
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested global row, rooted at the owning process */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* first round: fetch, for each requested row, its column count and its xadj offset on the owner */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  /* second round: one leaf per incoming nonzero, addressed by the owner's xadj offsets */
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* keep only the columns present in icols; dropped entries are marked with -1
     (assumes icols_indices is sorted, as required by PetscFindInt — TODO confirm at callers) */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compress the surviving entries into the final CSR arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand ownership of each array to the caller, or free it if the caller did not ask for it */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Common driver for MatCreateSubMatrices (sequential results, subcomm == PETSC_FALSE)
  and MatCreateSubMatricesMPI (parallel results on the index sets' communicators,
  subcomm == PETSC_TRUE).
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt    i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt   *indices, nindx, j, k, loc;
  PetscMPIInt issame;
  const PetscInt
    *irow_indices, *icol_indices;
  MPI_Comm scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* parallel extraction: the row and column index sets must live on the same, non-self communicator */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* collect row and column indices into one sorted, duplicate-free list used for renumbering */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns from global numbering to the submatrix's local numbering */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* MAT_REUSE_MATRIX: copy the data into the existing matrix and free the temporaries */
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /*get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print one line per locally owned row: "row <global row>:" followed by the column
   indices, or "(column, weight)" pairs when edge weights are stored. */
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
{
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    /* nothing extra to report for the INFO format */
    PetscFunctionReturn(PETSC_SUCCESS);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Dispatch MatView(); only ASCII viewers are handled, all others are silently ignored. */
static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release all storage held by the adjacency structure and detach the composed methods. */
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays were obtained with system malloc(); release with matching free() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Record symmetry-related options in the adjacency structure; all other options are ignored. */
static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL:
    break;
  default:
    PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the column indices (and, if requested, the weights as PetscScalar) of one
   locally owned row. Values are copied into a lazily grown scratch buffer
   (a->rowvalues); unweighted matrices report 1.0 for every entry. */
static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      /* grow geometrically so repeated MatGetRow() calls do not keep reallocating */
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j <
*nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0; 335 *v = (*nz) ? a->rowvalues : NULL; 336 } 337 if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL; 338 PetscFunctionReturn(PETSC_SUCCESS); 339 } 340 341 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) 342 { 343 PetscFunctionBegin; 344 PetscFunctionReturn(PETSC_SUCCESS); 345 } 346 347 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg) 348 { 349 Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data; 350 PetscBool flag; 351 352 PetscFunctionBegin; 353 /* If the matrix dimensions are not equal,or no of nonzeros */ 354 if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE; 355 356 /* if the a->i are the same */ 357 PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag)); 358 359 /* if a->j are the same */ 360 PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag)); 361 362 PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A))); 363 PetscFunctionReturn(PETSC_SUCCESS); 364 } 365 366 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done) 367 { 368 PetscInt i; 369 Mat_MPIAdj *a = (Mat_MPIAdj *)A->data; 370 PetscInt **ia = (PetscInt **)inia, **ja = (PetscInt **)inja; 371 372 PetscFunctionBegin; 373 *m = A->rmap->n; 374 *ia = a->i; 375 *ja = a->j; 376 *done = PETSC_TRUE; 377 if (oshift) { 378 for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++; 379 for (i = 0; i <= (*m); i++) (*ia)[i]++; 380 } 381 PetscFunctionReturn(PETSC_SUCCESS); 382 } 383 384 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done) 385 { 386 PetscInt i; 387 Mat_MPIAdj *a = (Mat_MPIAdj *)A->data; 388 PetscInt 
**ia = (PetscInt **)inia, **ja = (PetscInt **)inja; 389 390 PetscFunctionBegin; 391 PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()"); 392 PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()"); 393 if (oshift) { 394 PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument"); 395 PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument"); 396 for (i = 0; i <= (*m); i++) (*ia)[i]--; 397 for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--; 398 } 399 PetscFunctionReturn(PETSC_SUCCESS); 400 } 401 402 static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat) 403 { 404 Mat B; 405 PetscInt i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a; 406 const PetscInt *rj; 407 const PetscScalar *ra; 408 MPI_Comm comm; 409 410 PetscFunctionBegin; 411 PetscCall(MatGetSize(A, NULL, &N)); 412 PetscCall(MatGetLocalSize(A, &m, NULL)); 413 PetscCall(MatGetOwnershipRange(A, &rstart, NULL)); 414 415 /* count the number of nonzeros per row */ 416 for (i = 0; i < m; i++) { 417 PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL)); 418 for (j = 0; j < len; j++) { 419 if (rj[j] == i + rstart) { 420 len--; 421 break; 422 } /* don't count diagonal */ 423 } 424 nzeros += len; 425 PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL)); 426 } 427 428 /* malloc space for nonzeros */ 429 PetscCall(PetscMalloc1(nzeros + 1, &a)); 430 PetscCall(PetscMalloc1(N + 1, &ia)); 431 PetscCall(PetscMalloc1(nzeros + 1, &ja)); 432 433 nzeros = 0; 434 ia[0] = 0; 435 for (i = 0; i < m; i++) { 436 PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra)); 437 cnt = 0; 438 for (j = 0; j < len; j++) { 439 if (rj[j] != i + rstart) { /* if not diagonal */ 440 a[nzeros + cnt] = 
(PetscInt)PetscAbsScalar(ra[j]); /* adjacency weights are integral: store |value| */
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  /* B takes ownership of ia, ja and a */
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Insert entries before assembly: locally owned rows record the (row, col) pair in a
   hash set (values are ignored, diagonal and negative indices skipped); off-process
   rows are stashed for communication during assembly. Only valid before the CSR
   arrays exist (i.e. before the matrix is assembled). */
static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    /* first insertion: create the hash set and the stash lazily */
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue; /* negative row index means "skip", as in other Mat types */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* no negative columns, no diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Start communicating stashed off-process entries; creates the hash set if
   MatSetValues() was never called on this process. */
static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Drain the stash into the local hash set, then convert the hash set into sorted
   CSR arrays (adj->i / adj->j) via counting sort on the row indices. */
static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* counting sort: first pass counts entries per row, then prefix-sum to offsets */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    /* second pass scatters the columns; this advances rowstarts[] by one per entry */
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* undo the advancement above: shift offsets down one slot to restore the CSR row pointers */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  /* hash iteration order is arbitrary, so sort the columns within each row */
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Function table for MATMPIADJ; the /*N*/ /* markers give the slot index in struct _MatOps */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*155*/ NULL,
                                       NULL};

/* Install caller-provided CSR arrays (the matrix takes ownership); validates them in
   debug builds and agrees collectively on whether edge weights are in use. */
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* Make everybody knows if they are using edge weights or not */
  PetscCallMPI(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
  }
  b->j = j;
757 b->i = i; 758 b->values = values; 759 760 b->nz = i[B->rmap->n]; 761 b->diag = NULL; 762 b->symmetric = PETSC_FALSE; 763 b->freeaij = PETSC_TRUE; 764 765 B->ops->assemblybegin = NULL; 766 B->ops->assemblyend = NULL; 767 PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); 768 PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); 769 PetscCall(MatStashDestroy_Private(&B->stash)); 770 PetscFunctionReturn(PETSC_SUCCESS); 771 } 772 773 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B) 774 { 775 Mat_MPIAdj *a = (Mat_MPIAdj *)A->data; 776 const PetscInt *ranges; 777 MPI_Comm acomm, bcomm; 778 MPI_Group agroup, bgroup; 779 PetscMPIInt i, rank, size, nranks, *ranks; 780 781 PetscFunctionBegin; 782 *B = NULL; 783 PetscCall(PetscObjectGetComm((PetscObject)A, &acomm)); 784 PetscCallMPI(MPI_Comm_size(acomm, &size)); 785 PetscCallMPI(MPI_Comm_size(acomm, &rank)); 786 PetscCall(MatGetOwnershipRanges(A, &ranges)); 787 for (i = 0, nranks = 0; i < size; i++) { 788 if (ranges[i + 1] - ranges[i] > 0) nranks++; 789 } 790 if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */ 791 PetscCall(PetscObjectReference((PetscObject)A)); 792 *B = A; 793 PetscFunctionReturn(PETSC_SUCCESS); 794 } 795 796 PetscCall(PetscMalloc1(nranks, &ranks)); 797 for (i = 0, nranks = 0; i < size; i++) { 798 if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i; 799 } 800 PetscCallMPI(MPI_Comm_group(acomm, &agroup)); 801 PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup)); 802 PetscCall(PetscFree(ranks)); 803 PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm)); 804 PetscCallMPI(MPI_Group_free(&agroup)); 805 PetscCallMPI(MPI_Group_free(&bgroup)); 806 if (bcomm != MPI_COMM_NULL) { 807 PetscInt m, N; 808 Mat_MPIAdj *b; 809 PetscCall(MatGetLocalSize(A, &m, NULL)); 810 PetscCall(MatGetSize(A, NULL, &N)); 811 PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B)); 812 b = (Mat_MPIAdj *)(*B)->data; 813 b->freeaij = 
PETSC_FALSE; 814 PetscCallMPI(MPI_Comm_free(&bcomm)); 815 } 816 PetscFunctionReturn(PETSC_SUCCESS); 817 } 818 819 static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B) 820 { 821 PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i; 822 PetscInt *Values = NULL; 823 Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data; 824 PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm; 825 826 PetscFunctionBegin; 827 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size)); 828 PetscCall(MatGetSize(A, &M, &N)); 829 PetscCall(MatGetLocalSize(A, &m, NULL)); 830 nz = adj->nz; 831 PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]); 832 PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 833 834 PetscCall(PetscMPIIntCast(nz, &mnz)); 835 PetscCall(PetscMalloc2(size, &allnz, size, &dispnz)); 836 PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A))); 837 dispnz[0] = 0; 838 for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1]; 839 if (adj->values) { 840 PetscCall(PetscMalloc1(NZ, &Values)); 841 PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A))); 842 } 843 PetscCall(PetscMalloc1(NZ, &J)); 844 PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A))); 845 PetscCall(PetscFree2(allnz, dispnz)); 846 PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 847 nzstart -= nz; 848 /* shift the i[] values so they will be correct after being received */ 849 for (i = 0; i < m; i++) adj->i[i] += nzstart; 850 PetscCall(PetscMalloc1(M + 1, &II)); 851 PetscCall(PetscMPIIntCast(m, &mm)); 852 PetscCall(PetscMalloc2(size, &allm, size, &dispm)); 853 PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A))); 854 dispm[0] = 0; 
855 for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1]; 856 PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A))); 857 PetscCall(PetscFree2(allm, dispm)); 858 II[M] = NZ; 859 /* shift the i[] values back */ 860 for (i = 0; i < m; i++) adj->i[i] -= nzstart; 861 PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B)); 862 PetscFunctionReturn(PETSC_SUCCESS); 863 } 864 865 static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B) 866 { 867 PetscInt M, N, *II, *J, NZ, nz, m, nzstart, i; 868 PetscInt *Values = NULL; 869 Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data; 870 PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank; 871 872 PetscFunctionBegin; 873 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size)); 874 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank)); 875 PetscCall(MatGetSize(A, &M, &N)); 876 PetscCall(MatGetLocalSize(A, &m, NULL)); 877 nz = adj->nz; 878 PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]); 879 PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 880 881 PetscCall(PetscMPIIntCast(nz, &mnz)); 882 if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz)); 883 PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A))); 884 if (!rank) { 885 dispnz[0] = 0; 886 for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1]; 887 if (adj->values) { 888 PetscCall(PetscMalloc1(NZ, &Values)); 889 PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 890 } 891 PetscCall(PetscMalloc1(NZ, &J)); 892 PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 893 PetscCall(PetscFree2(allnz, dispnz)); 894 } else { 895 if (adj->values) 
PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 896 PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 897 } 898 PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); 899 nzstart -= nz; 900 /* shift the i[] values so they will be correct after being received */ 901 for (i = 0; i < m; i++) adj->i[i] += nzstart; 902 PetscCall(PetscMPIIntCast(m, &mm)); 903 if (!rank) { 904 PetscCall(PetscMalloc1(M + 1, &II)); 905 PetscCall(PetscMalloc2(size, &allm, size, &dispm)); 906 PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A))); 907 dispm[0] = 0; 908 for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1]; 909 PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 910 PetscCall(PetscFree2(allm, dispm)); 911 II[M] = NZ; 912 } else { 913 PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A))); 914 PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A))); 915 } 916 /* shift the i[] values back */ 917 for (i = 0; i < m; i++) adj->i[i] -= nzstart; 918 if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B)); 919 PetscFunctionReturn(PETSC_SUCCESS); 920 } 921 922 /*@ 923 MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows 924 925 Collective 926 927 Input Parameter: 928 . A - original `MATMPIADJ` matrix 929 930 Output Parameter: 931 . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A` 932 933 Level: developer 934 935 Note: 936 The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed. 

  Developer Note:
  This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  /* PetscUseMethod errors if the matrix type does not provide this method */
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
  intended for use constructing orderings and partitionings.

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`

.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
/*
  MatCreate_MPIAdj - type constructor for MATMPIADJ: allocates the per-type data,
  installs the operations table, and composes the type-specific method implementations
  that the public MatMPIAdj* wrappers dispatch to.
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data = (void *)b;
  /* struct assignment: install the (presumably file-static) MATMPIADJ operations table */
  B->ops[0]       = MatOps_Values;
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeq -
Converts an parallel `MATMPIADJ` matrix to complete `MATMPIADJ` on each process (needed by sequential partitioners) 984 985 Logically Collective 986 987 Input Parameter: 988 . A - the matrix 989 990 Output Parameter: 991 . B - the same matrix on all processes 992 993 Level: intermediate 994 995 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()` 996 @*/ 997 PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B) 998 { 999 PetscFunctionBegin; 1000 PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B)); 1001 PetscFunctionReturn(PETSC_SUCCESS); 1002 } 1003 1004 /*@ 1005 MatMPIAdjToSeqRankZero - Converts an parallel `MATMPIADJ` matrix to complete `MATMPIADJ` on rank zero (needed by sequential partitioners) 1006 1007 Logically Collective 1008 1009 Input Parameter: 1010 . A - the matrix 1011 1012 Output Parameter: 1013 . B - the same matrix on rank zero, not set on other ranks 1014 1015 Level: intermediate 1016 1017 Note: 1018 This routine has the advantage on systems with multiple ranks per node since only one copy of the matrix 1019 is stored on the first node, instead of the number of ranks copies. This can allow partitioning much larger 1020 parallel graph sequentially. 1021 1022 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()` 1023 @*/ 1024 PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B) 1025 { 1026 PetscFunctionBegin; 1027 PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B)); 1028 PetscFunctionReturn(PETSC_SUCCESS); 1029 } 1030 1031 /*@ 1032 MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements 1033 1034 Logically Collective 1035 1036 Input Parameters: 1037 + B - the matrix 1038 . i - the indices into `j` for the start of each row 1039 . j - the column indices for each row (sorted for each row). 1040 The indices in `i` and `j` start with zero (NOT with one). 
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  /* PetscTryMethod (unlike PetscUseMethod) is a silent no-op when the matrix type
     does not provide the method, so this is safe to call on any Mat type */
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
  The matrix need not have numerical values associated with it, it is
  intended for ordering (to reduce bandwidth etc) and partitioning.

  Collective

  Input Parameters:
+ comm - MPI communicator
. m - number of local rows
. N - number of global columns
. i - the indices into `j` for the start of each row
. j - the column indices for each row (sorted for each row).
- values - the values, optional, use `NULL` if not provided

  Output Parameter:
. A - the matrix

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself.
  PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonals.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the arrays `indices` and `values` must be retained by the user until `A` is destroyed

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  /* convenience wrapper: create, size (m local rows, N global columns), type, preallocate */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  /* hands ownership of i, j, values to the matrix (see Notes above) */
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}