/*
  Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>

/*
  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices).
  A usage sketch for the sequential case follows MatCreateSubMatrices_MPIAdj() below.
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct the SF graph: one leaf per requested row, rooted at the process that owns that row */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* allocate receive space for the edge weights when the matrix carries them */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
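  /* With this second SF (one leaf per received column slot, rooted at the owning process's adjncy entries),
     fetch the column indices, and the edge weights when present, for the requested rows */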
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /* Estimate a maximum number for allocating memory */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a sub-matrix for each pair of index sets */
  // if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the index sets");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /* get sub-matrices across a sub-communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

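/*
  Usage sketch (illustrative only, not part of the library): extracting one sequential sub-matrix per process
  from a parallel MATMPIADJ matrix `adj` with the public MatCreateSubMatrices() interface. The index sets
  `rowis` and `colis` are assumed to have been created by the caller on PETSC_COMM_SELF, e.g. with
  ISCreateGeneral().

    IS   rowis, colis;
    Mat *submats;

    PetscCall(MatCreateSubMatrices(adj, 1, &rowis, &colis, MAT_INITIAL_MATRIX, &submats));
    // ... use submats[0], a sequential adjacency matrix holding the selected rows/columns ...
    PetscCall(MatDestroySubMatrices(1, &submats));
*/
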
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
{
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(PETSC_SUCCESS);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
{
  PetscBool isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD:
    a->symmetric = flg;
    break;
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
  PetscBool   flag;

  PetscFunctionBegin;
  /* If the matrix dimensions or the number of nonzeros are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;

  /* if the a->i are the same */
  PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));

  /* if a->j are the same */
  PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));

  PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                B;
  PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, NULL, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

  /* count the number of nonzeros per row */
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
    for (j = 0; j < len; j++) {
      if (rj[j] == i + rstart) {
        len--;
        break;
      } /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros + 1, &a));
  PetscCall(PetscMalloc1(N + 1, &ia));
  PetscCall(PetscMalloc1(nzeros + 1, &ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
    cnt = 0;
    for (j = 0; j < len; j++) {
      if (rj[j] != i + rstart) { /* if not diagonal */
        a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  while (1) {
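    /* Drain the stash: each message delivers (row, col, value) entries that other processes set for rows owned by this process */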
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       NULL,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ MatCreateSubMatricesMPI_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL};

static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* Make sure every process knows whether edge weights are being used */
  PetscCallMPI(MPIU_Allreduce(&useedgeweights, &b->useedgeweights, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  const PetscInt *ranges;
  MPI_Comm        acomm, bcomm;
  MPI_Group       agroup, bgroup;
  PetscMPIInt     i, rank, size, nranks, *ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
  PetscCallMPI(MPI_Comm_size(acomm, &size));
  PetscCallMPI(MPI_Comm_rank(acomm, &rank));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscMalloc1(nranks, &ranks));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm, &agroup));
  PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt    m, N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, NULL, &N));
    PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
    b          = (Mat_MPIAdj *)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows

  Collective

  Input Parameter:
. A - original `MATMPIADJ` matrix

  Output Parameter:
. B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`

  Level: developer

  Note:
  The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.

  Developer Note:
  This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS, that require that every process own at least one row.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
  intended for use in constructing orderings and partitionings.
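
  Example Usage:
  An assembly-based construction sketch (illustrative only; the local sizes and the cyclic edge pattern are
  made up). Only the nonzero structure is kept: numerical values are ignored and diagonal entries are dropped.
  Entries may also be provided directly as CSR-style arrays with `MatCreateMPIAdj()` or
  `MatMPIAdjSetPreallocation()`, as described in the note below.
.vb
  Mat         adj;
  PetscInt    rstart, rend, N, row, col;
  PetscScalar one = 1.0;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &adj));
  PetscCall(MatSetSizes(adj, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(adj, MATMPIADJ));
  PetscCall(MatSetUp(adj));
  PetscCall(MatGetSize(adj, NULL, &N));
  PetscCall(MatGetOwnershipRange(adj, &rstart, &rend));
  for (row = rstart; row < rend; row++) {
    col = (row + 1) % N; // connect each vertex to its cyclic neighbor
    PetscCall(MatSetValues(adj, 1, &row, 1, &col, &one, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(adj, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(adj, MAT_FINAL_ASSEMBLY));
.ve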

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`

.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data         = (void *)b;
  B->ops[0]       = MatOps_Values;
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on all processes

  Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on rank zero (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on rank zero, not set on other ranks

  Level: intermediate

  Note:
  This routine has an advantage on systems with multiple ranks per node, since only one copy of the matrix
  is stored on the first rank instead of one copy per rank. This can allow partitioning much larger
  parallel graphs sequentially.

.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
@*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights

  Logically Collective

  Input Parameters:
+ B      - the matrix
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row)
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
  The matrix need not have numerical values associated with it, it is
  intended for ordering (to reduce bandwidth etc) and partitioning.

  Collective

  Input Parameters:
+ comm   - MPI communicator
. m      - number of local rows
. N      - number of global columns
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row)
- values - the values, optional, use `NULL` if not provided

  Output Parameter:
. A - the matrix

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonals.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

  Fortran Note:
  From Fortran the arrays `indices` and `values` must be retained by the user until `A` is destroyed

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt i[], PetscInt j[], PetscInt values[], Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
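
/*
  Usage sketch (illustrative only): building a small MATMPIADJ with MatCreateMPIAdj(). The numbers below are
  made up. The row pointer and column index arrays are allocated with PetscMalloc1() and then owned, and
  eventually freed, by the matrix; the indices are zero-based, sorted within each row, and contain no diagonal
  entries. Assumes two MPI ranks, so the global graph has four vertices with two local rows per rank.

    PetscInt *ii, *jj;
    Mat       adj;

    PetscCall(PetscMalloc1(3, &ii));
    PetscCall(PetscMalloc1(3, &jj));
    ii[0] = 0; ii[1] = 2; ii[2] = 3;  // first local row has two neighbors, second has one
    jj[0] = 1; jj[1] = 3; jj[2] = 2;  // global vertex numbers, sorted per row
    PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, 2, 4, ii, jj, NULL, &adj));
    // ... use adj, e.g. with MatPartitioningSetAdjacency() ...
    PetscCall(MatDestroy(&adj));
*/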