/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
#include <petscsf.h>

/*
  The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rncols, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct the SF graph: one leaf per requested row, rooted at the owning rank */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
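  /* Phase 1: broadcast, from the owning ranks to the requesting ranks, the number of
     columns in each requested row and the offset of each row in the owner's adjncy[] */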
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
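  /* Phase 2: build a second star forest with one leaf per requested nonzero to
     fetch the column indices (and edge weights, if present) themselves */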
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* allocate receive space for edge weights, if the matrix has them */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  rncols = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rncols++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  PetscCall(PetscCalloc1(rncols, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rncols, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rncols = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rncols] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rncols] = values_recv[j];
      rncols++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate the maximum workspace needed to merge each row and column index set
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = PetscMax(nindx, irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find column %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the row index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /* get sub-matrices across a sub-communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
{
  Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
  PetscInt          i, j, m = A->rmap->n;
  const char       *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A, &name));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(PETSC_SUCCESS);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SPD:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL:
    break;
  default:
    PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
  *nz = a->i[row + 1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
    }
    for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
  PetscBool   flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or numbers of nonzeros differ, the matrices are not equal */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* check that the a->i are the same */
  if (flag) PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));

  /* check that the a->j are the same */
  if (flag) PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));

  PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                B;
  PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, NULL, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

  /* count the number of nonzeros per row */
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
    for (j = 0; j < len; j++) {
      if (rj[j] == i + rstart) {
        len--;
        break;
      } /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros + 1, &a));
  PetscCall(PetscMalloc1(m + 1, &ia));
  PetscCall(PetscMalloc1(nzeros + 1, &ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
    cnt = 0;
    for (j = 0; j < len; j++) {
      if (rj[j] != i + rstart) { /* if not diagonal */
        a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

514 {
515   PetscScalar   *val;
516   PetscInt      *row, *col, m, rstart, *rowstarts;
517   PetscInt       i, j, ncols, flg, nz;
518   PetscMPIInt    n;
519   Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
520   PetscHashIter  hi;
521   PetscHashIJKey key;
522   PetscHSetIJ    ht = adj->ht;
523 
524   PetscFunctionBegin;
525   while (1) {
526     PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
527     if (!flg) break;
528 
529     for (i = 0; i < n;) {
530       /* Identify the consecutive vals belonging to the same row */
531       for (j = i, rstart = row[j]; j < n; j++) {
532         if (row[j] != rstart) break;
533       }
534       if (j < n) ncols = j - i;
535       else ncols = n - i;
536       /* Set all these values with a single function call */
537       PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
538       i = j;
539     }
540   }
541   PetscCall(MatStashScatterEnd_Private(&A->stash));
542   PetscCall(MatStashDestroy_Private(&A->stash));
543 
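  /* Convert the hash set of (row, col) keys into sorted CSR arrays: count the entries in
     each local row, prefix-sum into row starts, scatter the column indices into place,
     then restore the shifted row starts and sort each row's column indices */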
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  adj->ht = NULL; /* the hash set was destroyed above; do not leave a dangling pointer */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL,
                                       NULL};

static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  PetscBool   useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE;
  else useedgeweights = PETSC_FALSE;
  /* make sure everybody knows whether they are using edge weights or not */
  PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
    for (ii = 1; ii < B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " cannot be negative or smaller than i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
    }
    for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index j[%" PetscInt_FMT "]=%" PetscInt_FMT " out of range", ii, j[ii]);
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  const PetscInt *ranges;
  MPI_Comm        acomm, bcomm;
  MPI_Group       agroup, bgroup;
  PetscMPIInt     i, rank, size, nranks, *ranks;

  PetscFunctionBegin;
  *B = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
  PetscCallMPI(MPI_Comm_size(acomm, &size));
  PetscCallMPI(MPI_Comm_rank(acomm, &rank));
  PetscCall(MatGetOwnershipRanges(A, &ranges));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) { /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B = A;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscMalloc1(nranks, &ranks));
  for (i = 0, nranks = 0; i < size; i++) {
    if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm, &agroup));
  PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt    m, N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A, &m, NULL));
    PetscCall(MatGetSize(A, NULL, &N));
    PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
    b          = (Mat_MPIAdj *)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatMPIAdjCreateNonemptySubcommMat - creates the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows

  Collective

  Input Parameter:
. A - original `MATMPIADJ` matrix

  Output Parameter:
. B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`

  Level: developer

  Note:
  The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.

  Developer Note:
  This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.

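  Example Usage:
  A minimal sketch (assumes `A` is an assembled `MATMPIADJ` matrix):
.vb
  Mat B;
  PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &B));
  if (B) { /* this process owns rows of A and is part of the subcommunicator */
    /* ... hand B to a partitioner ... */
    PetscCall(MatDestroy(&B)); /* destroy B before A, since the arrays are shared */
  }
.ve
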
.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
  intended for use in constructing orderings and partitionings.

  Level: beginner

  Note:
  You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
  by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`, as sketched below.

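  Example Usage:
  A minimal sketch (sizes are illustrative) assembling the adjacency structure of a 4-vertex ring via
  `MatSetValues()`; for this matrix type the scalar values are ignored and only the nonzero structure is kept:
.vb
  Mat         A;
  PetscScalar w[2] = {1.0, 1.0}; /* placeholder values; MATMPIADJ records only the structure */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 4, 4));
  PetscCall(MatSetType(A, MATMPIADJ));
  for (PetscInt v = 0; v < 4; v++) {
    PetscInt nbrs[2] = {(v + 3) % 4, (v + 1) % 4}; /* the two ring neighbors of vertex v */
    PetscCall(MatSetValues(A, 1, &v, 2, nbrs, w, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
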
.seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data         = (void *)b;
  B->ops[0]       = MatOps_Values;
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on all processes

  Level: intermediate

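  Example Usage:
  A minimal sketch (assumes `A` is an assembled parallel `MATMPIADJ` matrix):
.vb
  Mat B;
  PetscCall(MatMPIAdjToSeq(A, &B));
  /* every process now holds the complete graph in B */
  PetscCall(MatDestroy(&B));
.ve
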
.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
@*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)

  Logically Collective

  Input Parameter:
. A - the matrix

  Output Parameter:
. B - the same matrix on rank zero, not set on other ranks

  Level: intermediate

  Note:
  This routine has an advantage on systems with multiple ranks per node: only a single copy of the matrix
  is stored (on rank zero), instead of one copy per rank. This makes it possible to sequentially partition
  much larger parallel graphs.

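  Example Usage:
  A minimal sketch (assumes `A` is an assembled parallel `MATMPIADJ` matrix):
.vb
  Mat         B;
  PetscMPIInt rank;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatMPIAdjToSeqRankZero(A, &B));
  if (rank == 0) {
    /* only rank zero holds the complete graph */
    PetscCall(MatDestroy(&B));
  }
.ve
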
.seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
@*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

  Logically Collective

  Input Parameters:
+ B      - the matrix
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row).
           The indices in `i` and `j` start with zero (NOT with one).
- values - [use `NULL` if not provided] edge weights

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonal elements.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

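  Example Usage:
  A minimal sketch (assumes `B` is a `MATMPIADJ` matrix with two local rows) for a process holding
  the edges 0-1 and 1-2; the arrays must come from `PetscMalloc()` since the matrix takes ownership:
.vb
  PetscInt *i, *j;
  PetscCall(PetscMalloc1(3, &i)); /* number of local rows + 1 */
  PetscCall(PetscMalloc1(3, &j)); /* number of local nonzeros */
  i[0] = 0; i[1] = 1; i[2] = 3;   /* row 0 has 1 entry, row 1 has 2 */
  j[0] = 1;                       /* row 0: edge to vertex 1 */
  j[1] = 0; j[2] = 2;             /* row 1: edges to vertices 0 and 2 */
  PetscCall(MatMPIAdjSetPreallocation(B, i, j, NULL));
.ve
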
  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
  The matrix need not have numerical values associated with it; it is
  intended for ordering (to reduce bandwidth, etc.) and partitioning.

  Collective

  Input Parameters:
+ comm   - MPI communicator
. m      - number of local rows
. N      - number of global columns
. i      - the indices into `j` for the start of each row
. j      - the column indices for each row (sorted for each row).
- values - the values, optional, use `NULL` if not provided

  Output Parameter:
. A - the matrix

  Level: intermediate

  Notes:
  The indices in `i` and `j` start with zero (NOT with one).

  You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
  when the matrix is destroyed; you must allocate them with `PetscMalloc()`.

  You should not include the matrix diagonals.

  If you already have a matrix, you can create its adjacency matrix by a call
  to `MatConvert()`, specifying a type of `MATMPIADJ`.

  Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

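  Example Usage:
  A minimal sketch creating the adjacency matrix of a 3-vertex path graph on a single
  process (no edge weights):
.vb
  Mat       A;
  PetscInt *i, *j;
  PetscCall(PetscMalloc1(4, &i));
  PetscCall(PetscMalloc1(4, &j));
  i[0] = 0; i[1] = 1; i[2] = 3; i[3] = 4; /* row starts */
  j[0] = 1;           /* vertex 0 is connected to vertex 1 */
  j[1] = 0; j[2] = 2; /* vertex 1 is connected to vertices 0 and 2 */
  j[3] = 1;           /* vertex 2 is connected to vertex 1 */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, 3, 3, i, j, NULL, &A));
.ve
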
  Fortran Note:
  From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}