xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 2d30e087755efd99e28fdfe792ffbeb2ee1ea928)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values) {
  /* Extract the CSR data (row offsets, column indices, optional edge weights) of the
     submatrix of 'adj' defined by the rows in 'irows' and the columns in 'icols'.
     Requested rows may be owned by other ranks; two PetscSF broadcasts fetch first the
     per-row lengths/offsets and then the adjacency entries themselves.  The output
     arrays are allocated here and ownership passes to the caller; each output pointer
     may be NULL, in which case the corresponding array is freed before returning. */
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested row, rooted on the rank that owns it */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  /* per-local-row column counts, to be broadcast from owners to requesters */
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch, for every requested row, its length and its offset into the owner's adjncy */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* local prefix sums over the received row lengths */
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  /* second SF: one leaf per adjacency entry of every requested row */
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch the adjacency entries (and edge weights, if any) of the requested rows */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* drop entries whose column is not in icols (mark with -1), counting the survivors;
     note PetscFindInt requires icols_indices to be sorted */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--; /* shrink the row length so sxadj below is consistent */
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand each output to the caller, or free it when not requested */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
132 
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[]) {
  /* Common implementation behind MatCreateSubMatrices (subcomm = PETSC_FALSE, each
     submatrix lives on PETSC_COMM_SELF) and MatCreateSubMatricesMPI (subcomm =
     PETSC_TRUE, each submatrix lives on the communicator of its index sets).
     Column indices of each submatrix are renumbered into the compressed local
     numbering given by the sorted union of its row and column indices. */
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* the row and column index sets must share one non-self communicator */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* build the sorted, duplicate-free union of row and column indices */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(indices + irow_n, icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns: global index -> position in the compressed union */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* MAT_REUSE_MATRIX: copy into the existing submatrix and free the scratch arrays */
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix  must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
207 
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[]) {
  /*get sub-matrices across a sub communicator: each submat[i] lives on the
    communicator of irow[i]/icol[i] (subcomm = PETSC_TRUE) */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(0);
}
214 
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[]) {
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF: every submat[i] is sequential
    (subcomm = PETSC_FALSE) */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(0);
}
221 
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer) {
  /* ASCII viewer for MATMPIADJ: prints each locally owned row as
       row <global row>: (col, weight) ...     when edge weights are stored, or
       row <global row>: col ...               otherwise.
     PETSC_VIEWER_ASCII_INFO prints nothing; MATLAB format is not supported. */
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    /* synchronized printing so ranks emit their rows in order; tabs disabled so the
       output lines up regardless of the viewer's tab level */
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(0);
}
254 
255 static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer) {
256   PetscBool iascii;
257 
258   PetscFunctionBegin;
259   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
260   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
261   PetscFunctionReturn(0);
262 }
263 
static PetscErrorCode MatDestroy_MPIAdj(Mat mat) {
  /* Destroys a MATMPIADJ matrix: releases the CSR arrays (if owned), auxiliary
     buffers, the type-specific data, and the composed query functions. */
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays presumably came from plain malloc() (e.g. an external package), so
         free() is used rather than PetscFree().  NOTE(review): a->values is not
         freed on this path — confirm it is never set together with freeaijwithfree */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(0);
}
291 
static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg) {
  /* MATMPIADJ stores only a single symmetry flag; all symmetry-like options map
     onto it, the *_ETERNAL options are accepted but ignored, and everything else
     is logged and ignored. */
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD: a->symmetric = flg; break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL: break;
  default: PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op])); break;
  }
  PetscFunctionReturn(0);
}
308 
static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) {
  /* Returns the column indices (pointer into a->j, no copy) and, if requested, the
     values of a locally owned global row.  Values are served from a lazily grown
     scratch buffer a->rowvalues; a matrix without stored weights reports 1.0 for
     every entry.  'row' is a global index and must lie in this rank's range. */
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart; /* convert to local row index */
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the scratch buffer geometrically so repeated calls do not realloc per row */
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
329 
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) {
  /* Intentionally a no-op: MatGetRow_MPIAdj hands out pointers into persistent
     internal storage, so there is nothing to release. */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
334 
335 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg) {
336   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
337   PetscBool   flag;
338 
339   PetscFunctionBegin;
340   /* If the  matrix dimensions are not equal,or no of nonzeros */
341   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
342 
343   /* if the a->i are the same */
344   PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
345 
346   /* if a->j are the same */
347   PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));
348 
349   PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
350   PetscFunctionReturn(0);
351 }
352 
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done) {
  /* Hands out pointers to the internal CSR arrays (no copy).  A nonzero 'oshift'
     shifts the stored arrays IN PLACE to that base; MatRestoreRowIJ_MPIAdj must be
     called with the same oshift to undo it.  'symmetric' and 'blockcompressed' are
     ignored for this format. */
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja before ia: the loop bound (*ia)[*m] must be read pre-shift */
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
369 
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done) {
  /* Undoes MatGetRowIJ_MPIAdj: verifies the caller passed back the same internal
     arrays and, if an offset shift was applied, shifts them back in place. */
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument");
    /* unshift ia before ja: the ja loop bound (*ia)[*m] must be read post-unshift */
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(0);
}
386 
387 PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat) {
388   Mat                B;
389   PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
390   const PetscInt    *rj;
391   const PetscScalar *ra;
392   MPI_Comm           comm;
393 
394   PetscFunctionBegin;
395   PetscCall(MatGetSize(A, NULL, &N));
396   PetscCall(MatGetLocalSize(A, &m, NULL));
397   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
398 
399   /* count the number of nonzeros per row */
400   for (i = 0; i < m; i++) {
401     PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
402     for (j = 0; j < len; j++) {
403       if (rj[j] == i + rstart) {
404         len--;
405         break;
406       } /* don't count diagonal */
407     }
408     nzeros += len;
409     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
410   }
411 
412   /* malloc space for nonzeros */
413   PetscCall(PetscMalloc1(nzeros + 1, &a));
414   PetscCall(PetscMalloc1(N + 1, &ia));
415   PetscCall(PetscMalloc1(nzeros + 1, &ja));
416 
417   nzeros = 0;
418   ia[0]  = 0;
419   for (i = 0; i < m; i++) {
420     PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
421     cnt = 0;
422     for (j = 0; j < len; j++) {
423       if (rj[j] != i + rstart) { /* if not diagonal */
424         a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
425         ja[nzeros + cnt++] = rj[j];
426       }
427     }
428     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
429     nzeros += cnt;
430     ia[i + 1] = nzeros;
431   }
432 
433   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
434   PetscCall(MatCreate(comm, &B));
435   PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
436   PetscCall(MatSetType(B, type));
437   PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
438 
439   if (reuse == MAT_INPLACE_MATRIX) {
440     PetscCall(MatHeaderReplace(A, &B));
441   } else {
442     *newmat = B;
443   }
444   PetscFunctionReturn(0);
445 }
446 
PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im) {
  /* Records structural entries before assembly: locally owned (row, col) pairs go
     into a hash set of index pairs (values themselves are ignored — MATMPIADJ is
     structure-only here), off-rank rows are stashed for communication during
     assembly.  Negative indices and diagonal entries are silently skipped.
     Only valid before the matrix has been assembled (adj->i still NULL). */
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazily create the hash set and stash on first insertion */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* row owned elsewhere: stash the whole row for delivery at assembly time */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip invalid and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}
478 
PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type) {
  /* Starts assembly: ensures the hash set and layouts exist (in case no entries
     were ever set on this rank) and begins scattering stashed off-rank rows to
     their owners.  MatAssemblyEnd_MPIAdj drains the scatter and builds the CSR. */
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(0);
}
494 
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type) {
  /* Finishes assembly: drains stashed off-rank entries into the local hash set,
     then converts the hash set into sorted CSR arrays (adj->i / adj->j) via a
     counting pass, a prefix sum, a bucket fill, and a final offset shift. */
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* receive stashed entries from other ranks and insert them locally */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  /* count entries per local row: rowstarts[r+1] = #entries in row r */
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  /* prefix sum turns counts into starting offsets */
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  /* scatter column indices into their row buckets; this advances rowstarts[r] */
  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* undo the advancement: shift offsets back down by one row */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  /* MATMPIADJ requires sorted column indices within each row */
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE; /* the matrix now owns the CSR arrays */
  PetscFunctionReturn(0);
}
556 
557 /* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ.  Entries are POSITIONAL (the /_nn_/ comments mark
   slot indices in struct _MatOps); unsupported operations are NULL.  Do not reorder. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL};
709 
/* Adopt caller-provided CSR arrays (i, j, optional values) as the matrix data; no copy is made.
   After this call the matrix owns the arrays (freeaij) and is fully assembled. */
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values) {
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* Make sure every rank agrees on whether edge weights are in use; MPI_MAX turns the flag on everywhere if any rank passed values */
  PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* Validate CSR structure: i[] starts at 0 and is non-decreasing; j[] entries are valid global column indices */
    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
  }
  /* Take ownership of the caller's arrays directly (no copy); freeaij below records that we free them on destroy */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n]; /* total local nonzeros is the final row offset */
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  /* The CSR data is already final, so assembly is a no-op; null the callbacks before triggering assembly bookkeeping */
  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash)); /* stash is never needed since values are set directly, not via MatSetValues() */
  PetscFunctionReturn(0);
}
747 
748 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B) {
749   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
750   const PetscInt *ranges;
751   MPI_Comm        acomm, bcomm;
752   MPI_Group       agroup, bgroup;
753   PetscMPIInt     i, rank, size, nranks, *ranks;
754 
755   PetscFunctionBegin;
756   *B = NULL;
757   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
758   PetscCallMPI(MPI_Comm_size(acomm, &size));
759   PetscCallMPI(MPI_Comm_size(acomm, &rank));
760   PetscCall(MatGetOwnershipRanges(A, &ranges));
761   for (i = 0, nranks = 0; i < size; i++) {
762     if (ranges[i + 1] - ranges[i] > 0) nranks++;
763   }
764   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
765     PetscCall(PetscObjectReference((PetscObject)A));
766     *B = A;
767     PetscFunctionReturn(0);
768   }
769 
770   PetscCall(PetscMalloc1(nranks, &ranks));
771   for (i = 0, nranks = 0; i < size; i++) {
772     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
773   }
774   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
775   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
776   PetscCall(PetscFree(ranks));
777   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
778   PetscCallMPI(MPI_Group_free(&agroup));
779   PetscCallMPI(MPI_Group_free(&bgroup));
780   if (bcomm != MPI_COMM_NULL) {
781     PetscInt    m, N;
782     Mat_MPIAdj *b;
783     PetscCall(MatGetLocalSize(A, &m, NULL));
784     PetscCall(MatGetSize(A, NULL, &N));
785     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
786     b          = (Mat_MPIAdj *)(*B)->data;
787     b->freeaij = PETSC_FALSE;
788     PetscCallMPI(MPI_Comm_free(&bcomm));
789   }
790   PetscFunctionReturn(0);
791 }
792 
/* Replicate a parallel MATMPIADJ onto every rank: each process ends up with the complete
   global adjacency structure as a PETSC_COMM_SELF MPIAdj matrix. A is left unmodified. */
PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B) {
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = total nonzeros over all ranks */
  PetscCallMPI(MPI_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather each rank's nonzero count and build displacements for the variable-length gathers below */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) { /* edge weights are PetscInt for MPIAdj, hence MPIU_INT */
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  /* nzstart = number of nonzeros on all lower ranks (exclusive prefix sum) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ; /* close the global row-pointer array */
  /* shift the i[] values back so A is left unmodified */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* B takes ownership of II, J, Values; they are freed when B is destroyed */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(0);
}
837 
/* Gather a parallel MATMPIADJ onto rank 0 only; unlike MatMPIAdjToSeq() just one sequential
   copy exists (on the root), so much larger graphs fit in memory. *B is set only on rank 0.
   The root/non-root branches below must make exactly matching collective calls. */
PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B) {
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = total nonzeros across the communicator */
  PetscCallMPI(MPI_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  /* per-rank counts and displacements are needed only on the root */
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    /* receive-side arguments are ignored on non-root ranks, hence the NULLs */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* nzstart = number of nonzeros on all lower ranks (exclusive prefix sum) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ; /* close the global row-pointer array */
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back so A is left unmodified */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* only rank 0 creates the sequential matrix; it takes ownership of II, J, Values */
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(0);
}
893 
894 /*@
895    MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
896 
897    Collective
898 
899    Input Parameter:
900 .  A - original MPIAdj matrix
901 
902    Output Parameter:
903 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
904 
905    Level: developer
906 
907    Note:
908    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
909 
910    The matrix B should be destroyed with `MatDestroy()`. The arrays are not copied, so B should be destroyed before A is destroyed.
911 
912 .seealso: `MATMPIADJ`, `MatCreateMPIAdj()`
913 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  /* dispatch to the implementation composed on A in MatCreate_MPIAdj(); errors if A is not a MATMPIADJ */
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(0);
}
920 
921 /*MC
922    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
923    intended for use constructing orderings and partitionings.
924 
925   Level: beginner
926 
927   Note:
928     You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
929     by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
930 
931 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
932 M*/
933 
/* Type constructor for MATMPIADJ: allocates the per-type data, installs the function
   table, and composes the type-specific methods used by the public MatMPIAdjXXX() APIs. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B) {
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B, &b));
  B->data = (void *)b;
  PetscCall(PetscMemcpy(B->ops, &MatOps_Values, sizeof(struct _MatOps)));
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  /* dispatch targets looked up by PetscUseMethod()/PetscTryMethod() in the public interfaces */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(0);
}
951 
952 /*@C
   MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)
954 
955    Logically Collective
956 
957    Input Parameter:
958 .  A - the matrix
959 
960    Output Parameter:
961 .  B - the same matrix on all processes
962 
963    Level: intermediate
964 
965 .seealso: `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
966 @*/
967 PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B) {
968   PetscFunctionBegin;
969   PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
970   PetscFunctionReturn(0);
971 }
972 
973 /*@C
   MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on rank zero (needed by sequential partitioners)
975 
976    Logically Collective
977 
978    Input Parameter:
979 .  A - the matrix
980 
981    Output Parameter:
982 .  B - the same matrix on rank zero, not set on other ranks
983 
984    Level: intermediate
985 
986    Note:
     This routine is advantageous on systems with multiple ranks per node, since only one copy of the matrix
     is stored on the first node instead of one copy per rank. This can allow sequentially partitioning much
     larger parallel graphs.
990 
991 .seealso: `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
992 @*/
993 PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B) {
994   PetscFunctionBegin;
995   PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
996   PetscFunctionReturn(0);
997 }
998 
999 /*@C
1000    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1001 
1002    Logically Collective
1003 
1004    Input Parameters:
1005 +  A - the matrix
1006 .  i - the indices into j for the start of each row
1007 .  j - the column indices for each row (sorted for each row).
1008        The indices in i and j start with zero (NOT with one).
1009 -  values - [optional] edge weights
1010 
1011    Level: intermediate
1012 
1013 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1014 @*/
1015 PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values) {
1016   PetscFunctionBegin;
1017   PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 /*@C
1022    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1023    The matrix does not have numerical values associated with it, but is
1024    intended for ordering (to reduce bandwidth etc) and partitioning.
1025 
1026    Collective
1027 
1028    Input Parameters:
1029 +  comm - MPI communicator
1030 .  m - number of local rows
1031 .  N - number of global columns
1032 .  i - the indices into j for the start of each row
1033 .  j - the column indices for each row (sorted for each row).
1034        The indices in i and j start with zero (NOT with one).
1035 -  values -[optional] edge weights
1036 
1037    Output Parameter:
1038 .  A - the matrix
1039 
1040    Level: intermediate
1041 
1042    Notes:
1043    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
1044    when the matrix is destroyed; you must allocate them with `PetscMalloc()`. If you
1045    call from Fortran you need not create the arrays with `PetscMalloc()`.
1046 
1047    You should not include the matrix diagonals.
1048 
1049    If you already have a matrix, you can create its adjacency matrix by a call
1050    to `MatConvert()`, specifying a type of `MATMPIADJ`.
1051 
1052    Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1053 
1054 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1055 @*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A) {
  PetscFunctionBegin;
  /* create the Mat shell, fix its sizes and type, then hand the CSR arrays over */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N)); /* rows distributed by local m; N global columns */
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values)); /* matrix takes ownership of i, j, values (no copy) */
  PetscFunctionReturn(0);
}
1064