xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 813083eb7473ec21f4ff50a178709600a8f71c4c)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
5 #include <petscsf.h>
6 
7 /*
8   The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
9 */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  /* Gather onto each process the CSR data (row offsets, column indices, and optional edge
     weights) of the global rows listed in irows, keeping only the columns listed in icols.
     The output arrays are allocated here; ownership passes to the caller, and any array whose
     output pointer is NULL is freed before returning. icols is searched with PetscFindInt,
     so its indices are assumed sorted -- TODO confirm with callers. */
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested row, rooted at the process owning that row */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  /* length of each locally owned row */
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch, for every requested row, its length and its starting offset in the owner's adjncy */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received row lengths into local offsets for the requested rows */
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  /* second SF: one leaf per nonzero of the requested rows, pointing at the owner's adjncy entry */
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j; /* offset into the owner's adjncy array */
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch the column indices (and the integer edge weights, if used) of the requested rows */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* mark (with -1) entries whose column is not in icols, and count the survivors;
     ncols_recv[i] is decremented in place so it ends up as the kept-column count per row */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compress the surviving entries into the output arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  /* build the sub-matrix row offsets from the kept-column counts */
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand results to the caller; free anything the caller did not ask for */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
132 
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  /* Common driver for MatCreateSubMatrices (subcomm == PETSC_FALSE: sequential sub-matrices on
     PETSC_COMM_SELF) and MatCreateSubMatricesMPI (subcomm == PETSC_TRUE: parallel sub-matrices
     on the communicator of the row/column index sets). */
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* parallel case: row and column IS must share a communicator, and it must not be self */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data: arrays are allocated by the callee and owned here until handed off */
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* concatenate row and column indices, then sort and deduplicate to get the
       global-to-local column renumbering table */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns from global indices to positions in the sorted table */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* reuse path: copy into the existing matrix's CSR arrays, then free our temporaries */
      Mat         sadj = *submat[i];
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix  must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
208 
209 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
210 {
211   /*get sub-matrices across a sub communicator */
212   PetscFunctionBegin;
213   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
214   PetscFunctionReturn(PETSC_SUCCESS);
215 }
216 
217 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
218 {
219   PetscFunctionBegin;
220   /*get sub-matrices based on PETSC_COMM_SELF */
221   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
222   PetscFunctionReturn(PETSC_SUCCESS);
223 }
224 
225 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
226 {
227   Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
228   PetscInt          i, j, m = A->rmap->n;
229   const char       *name;
230   PetscViewerFormat format;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscObjectGetName((PetscObject)A, &name));
234   PetscCall(PetscViewerGetFormat(viewer, &format));
235   if (format == PETSC_VIEWER_ASCII_INFO) {
236     PetscFunctionReturn(PETSC_SUCCESS);
237   } else {
238     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
239     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
240     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241     for (i = 0; i < m; i++) {
242       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
243       for (j = a->i[i]; j < a->i[i + 1]; j++) {
244         if (a->values) {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
246         } else {
247           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
248         }
249       }
250       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
251     }
252     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
253     PetscCall(PetscViewerFlush(viewer));
254     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255   }
256   PetscFunctionReturn(PETSC_SUCCESS);
257 }
258 
259 static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
260 {
261   PetscBool iascii;
262 
263   PetscFunctionBegin;
264   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
265   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
266   PetscFunctionReturn(PETSC_SUCCESS);
267 }
268 
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  /* Release all storage owned by the MATMPIADJ implementation and detach composed methods. */
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) { /* we own the CSR arrays */
    if (a->freeaijwithfree) {
      /* arrays were obtained with plain malloc() (e.g. from an external package), so they
         must be released with free(), not PetscFree() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
      /* NOTE(review): a->values is not released on this path -- presumably values are never
         set together with freeaijwithfree; confirm against the preallocation routines */
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  /* detach type-specific composed functions so the object can change type cleanly */
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
295 
296 static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
297 {
298   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
299 
300   PetscFunctionBegin;
301   switch (op) {
302   case MAT_SYMMETRIC:
303   case MAT_STRUCTURALLY_SYMMETRIC:
304   case MAT_HERMITIAN:
305   case MAT_SPD:
306     a->symmetric = flg;
307     break;
308   case MAT_SYMMETRY_ETERNAL:
309   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
310   case MAT_SPD_ETERNAL:
311     break;
312   default:
313     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
314     break;
315   }
316   PetscFunctionReturn(PETSC_SUCCESS);
317 }
318 
319 static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
320 {
321   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
322 
323   PetscFunctionBegin;
324   row -= A->rmap->rstart;
325   PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
326   *nz = a->i[row + 1] - a->i[row];
327   if (v) {
328     PetscInt j;
329     if (a->rowvalues_alloc < *nz) {
330       PetscCall(PetscFree(a->rowvalues));
331       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
332       PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
333     }
334     for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
335     *v = (*nz) ? a->rowvalues : NULL;
336   }
337   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
338   PetscFunctionReturn(PETSC_SUCCESS);
339 }
340 
341 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
342 {
343   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
344   PetscBool   flag;
345 
346   PetscFunctionBegin;
347   /* If the  matrix dimensions are not equal,or no of nonzeros */
348   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
349 
350   /* if the a->i are the same */
351   PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
352 
353   /* if a->j are the same */
354   PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));
355 
356   PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
357   PetscFunctionReturn(PETSC_SUCCESS);
358 }
359 
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  /* Expose pointers to the internal CSR arrays (no copy). When oshift is nonzero the arrays
     are converted to 1-based numbering IN PLACE; MatRestoreRowIJ_MPIAdj undoes this.
     The symmetric and blockcompressed arguments are ignored for this matrix type. */
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first, while (*ia)[*m] still holds the 0-based nonzero count */
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
    for (i = 0; i <= (*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
377 
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
{
  /* Companion to MatGetRowIJ_MPIAdj: verifies the caller hands back the same internal
     arrays, and undoes the in-place 1-based shift when oshift is nonzero. */
  PetscInt    i;
  Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
  PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument");
    PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument");
    /* shift ia back first so (*ia)[*m] is again the 0-based nonzero count for the ja loop */
    for (i = 0; i <= (*m); i++) (*ia)[i]--;
    for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
395 
396 static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
397 {
398   Mat                B;
399   PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
400   const PetscInt    *rj;
401   const PetscScalar *ra;
402   MPI_Comm           comm;
403 
404   PetscFunctionBegin;
405   PetscCall(MatGetSize(A, NULL, &N));
406   PetscCall(MatGetLocalSize(A, &m, NULL));
407   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
408 
409   /* count the number of nonzeros per row */
410   for (i = 0; i < m; i++) {
411     PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
412     for (j = 0; j < len; j++) {
413       if (rj[j] == i + rstart) {
414         len--;
415         break;
416       } /* don't count diagonal */
417     }
418     nzeros += len;
419     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
420   }
421 
422   /* malloc space for nonzeros */
423   PetscCall(PetscMalloc1(nzeros + 1, &a));
424   PetscCall(PetscMalloc1(N + 1, &ia));
425   PetscCall(PetscMalloc1(nzeros + 1, &ja));
426 
427   nzeros = 0;
428   ia[0]  = 0;
429   for (i = 0; i < m; i++) {
430     PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
431     cnt = 0;
432     for (j = 0; j < len; j++) {
433       if (rj[j] != i + rstart) { /* if not diagonal */
434         a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
435         ja[nzeros + cnt++] = rj[j];
436       }
437     }
438     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
439     nzeros += cnt;
440     ia[i + 1] = nzeros;
441   }
442 
443   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
444   PetscCall(MatCreate(comm, &B));
445   PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
446   PetscCall(MatSetType(B, type));
447   PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
448 
449   if (reuse == MAT_INPLACE_MATRIX) {
450     PetscCall(MatHeaderReplace(A, &B));
451   } else {
452     *newmat = B;
453   }
454   PetscFunctionReturn(PETSC_SUCCESS);
455 }
456 
static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  /* Record nonzero positions (values themselves are ignored for the adjacency structure).
     Locally owned entries are inserted into a hash set of (i,j) keys; off-process rows are
     stashed and communicated in MatAssemblyBegin/End. Negative indices and diagonal
     entries are silently skipped. Only valid before the matrix is assembled. */
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    /* first insertion: create the hash set and the off-process stash, and finalize layouts */
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue; /* negative row index means "skip this row" */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* row owned by another process: stash the whole row for communication at assembly */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip negative columns and self-edges */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
489 
static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  /* Start communicating stashed off-process entries. The hash set is created lazily here in
     case MatSetValues was never called on this process. */
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
506 
static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  /* Finish assembly: receive stashed off-process entries, merge them into the hash set via
     MatSetValues_MPIAdj, then convert the hash set into sorted CSR arrays (adj->i/adj->j). */
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* drain incoming stash messages; each message holds (row, col, val) triples */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* histogram the hash-set keys per local row (shifted by one for the prefix sum below) */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  /* prefix sum: rowstarts[i] is now the starting offset of local row i */
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  /* scatter the keys into col[]; this advances rowstarts[i] to the END of row i */
  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* shift rowstarts back by one slot to restore the start-of-row offsets */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  /* hash iteration order is arbitrary, so sort each row's column indices */
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE; /* arrays built here are owned (and later freed) by the matrix */
  PetscFunctionReturn(PETSC_SUCCESS);
}
569 
/* The MATMPIADJ function table. Slots correspond to the MatOps ordering (the numeric
   comments mark every fifth slot); unimplemented operations are NULL. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       NULL,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*155*/ NULL,
                                       NULL};
727 
728 static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
729 {
730   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
731   PetscBool   useedgeweights;
732 
733   PetscFunctionBegin;
734   PetscCall(PetscLayoutSetUp(B->rmap));
735   PetscCall(PetscLayoutSetUp(B->cmap));
736   if (values) useedgeweights = PETSC_TRUE;
737   else useedgeweights = PETSC_FALSE;
738   /* Make everybody knows if they are using edge weights or not */
739   PetscCallMPI(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));
740 
741   if (PetscDefined(USE_DEBUG)) {
742     PetscInt ii;
743 
744     PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
745     for (ii = 1; ii < B->rmap->n; ii++) {
746       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
747     }
748     for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
749   }
750   b->j      = j;
751   b->i      = i;
752   b->values = values;
753 
754   b->nz        = i[B->rmap->n];
755   b->diag      = NULL;
756   b->symmetric = PETSC_FALSE;
757   b->freeaij   = PETSC_TRUE;
758 
759   B->ops->assemblybegin = NULL;
760   B->ops->assemblyend   = NULL;
761   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
762   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
763   PetscCall(MatStashDestroy_Private(&B->stash));
764   PetscFunctionReturn(PETSC_SUCCESS);
765 }
766 
767 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
768 {
769   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
770   const PetscInt *ranges;
771   MPI_Comm        acomm, bcomm;
772   MPI_Group       agroup, bgroup;
773   PetscMPIInt     i, rank, size, nranks, *ranks;
774 
775   PetscFunctionBegin;
776   *B = NULL;
777   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
778   PetscCallMPI(MPI_Comm_size(acomm, &size));
779   PetscCallMPI(MPI_Comm_size(acomm, &rank));
780   PetscCall(MatGetOwnershipRanges(A, &ranges));
781   for (i = 0, nranks = 0; i < size; i++) {
782     if (ranges[i + 1] - ranges[i] > 0) nranks++;
783   }
784   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
785     PetscCall(PetscObjectReference((PetscObject)A));
786     *B = A;
787     PetscFunctionReturn(PETSC_SUCCESS);
788   }
789 
790   PetscCall(PetscMalloc1(nranks, &ranks));
791   for (i = 0, nranks = 0; i < size; i++) {
792     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
793   }
794   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
795   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
796   PetscCall(PetscFree(ranks));
797   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
798   PetscCallMPI(MPI_Group_free(&agroup));
799   PetscCallMPI(MPI_Group_free(&bgroup));
800   if (bcomm != MPI_COMM_NULL) {
801     PetscInt    m, N;
802     Mat_MPIAdj *b;
803     PetscCall(MatGetLocalSize(A, &m, NULL));
804     PetscCall(MatGetSize(A, NULL, &N));
805     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
806     b          = (Mat_MPIAdj *)(*B)->data;
807     b->freeaij = PETSC_FALSE;
808     PetscCallMPI(MPI_Comm_free(&bcomm));
809   }
810   PetscFunctionReturn(PETSC_SUCCESS);
811 }
812 
/*
  Gathers a distributed MATMPIADJ onto every rank as a complete matrix on PETSC_COMM_SELF.

  Each rank allocates fresh copies of the full row-offset (II), column (J), and optional
  weight (Values) arrays and hands them to MatCreateMPIAdj(), which takes ownership.
  A's own arrays are temporarily shifted in place and restored before returning.
*/
static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  /* sanity: the local row offsets must end exactly at the stored local nonzero count */
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = global number of nonzeros (graph edges) */
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts and build displacements for the variable-length gathers */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  /* NOTE(review): this Allgatherv is only entered when the local values pointer is non-NULL;
     if some ranks have weights and others do not, the collective would be mismatched —
     presumably callers guarantee consistency across ranks; verify */
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  /* gather the concatenated column indices */
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  /* nzstart = exclusive prefix sum of nz = global offset of this rank's first nonzero */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  /* gather per-rank row counts and displacements for the row-offset gather */
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ; /* closing offset of the assembled global row-offset array */
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* the new matrix takes ownership of II, J, and Values */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
858 
/*
  Gathers a distributed MATMPIADJ onto rank 0 only, as a complete matrix on PETSC_COMM_SELF.
  On ranks other than 0, *B is not set. Same structure as MatMPIAdjToSeq_MPIAdj() but uses
  rooted MPI_Gather/MPI_Gatherv so only rank 0 needs memory for the whole graph; non-root
  branches pass NULL receive arguments (ignored by MPI on non-root ranks).
*/
static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  /* sanity: the local row offsets must end exactly at the stored local nonzero count */
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = global number of nonzeros (graph edges); needed on rank 0 for allocations */
  PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts onto rank 0 and build its displacement array */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    /* NOTE(review): the weight Gatherv runs only where the local values pointer is non-NULL;
       if some ranks have weights and others do not, the root and non-root branches would
       call a mismatched collective — presumably callers guarantee consistency; verify */
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    /* non-root ranks only send; receive arguments are ignored */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* nzstart = exclusive prefix sum of nz = global offset of this rank's first nonzero */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    /* gather per-rank row counts, then the shifted row offsets, into the global II array */
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ; /* closing offset of the assembled global row-offset array */
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* only rank 0 builds the sequential matrix; it takes ownership of II, J, Values */
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
915 
916 /*@
917   MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
918 
919   Collective
920 
921   Input Parameter:
922 . A - original `MATMPIADJ` matrix
923 
924   Output Parameter:
925 . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`
926 
927   Level: developer
928 
929   Note:
930   The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
931 
932   Developer Note:
933   This function is mostly useful for internal use by mesh partitioning packages, such as ParMETIS that require that every process owns at least one row.
934 
935 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
936 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  /* dispatch to the implementation composed on A; errors if A's type does not provide one */
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
944 
945 /*MC
946   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
947   intended for use constructing orderings and partitionings.
948 
949   Level: beginner
950 
951   Note:
952   You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
953   by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
954 
955 .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
956 M*/
957 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
958 {
959   Mat_MPIAdj *b;
960 
961   PetscFunctionBegin;
962   PetscCall(PetscNew(&b));
963   B->data         = (void *)b;
964   B->ops[0]       = MatOps_Values;
965   B->assembled    = PETSC_FALSE;
966   B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */
967 
968   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
969   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
970   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
971   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
972   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
973   PetscFunctionReturn(PETSC_SUCCESS);
974 }
975 
976 /*@
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` on each process (needed by sequential partitioners)
978 
979   Logically Collective
980 
981   Input Parameter:
982 . A - the matrix
983 
984   Output Parameter:
985 . B - the same matrix on all processes
986 
987   Level: intermediate
988 
989 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
990 @*/
991 PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
992 {
993   PetscFunctionBegin;
994   PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
995   PetscFunctionReturn(PETSC_SUCCESS);
996 }
997 
998 /*@
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATPIADJ` on rank zero (needed by sequential partitioners)
1000 
1001   Logically Collective
1002 
1003   Input Parameter:
1004 . A - the matrix
1005 
1006   Output Parameter:
1007 . B - the same matrix on rank zero, not set on other ranks
1008 
1009   Level: intermediate
1010 
1011   Note:
  This routine is advantageous on systems with multiple MPI processes per node since only one copy of the matrix
  is stored on the first node, instead of one copy per process. This can allow sequentially partitioning a much
  larger parallel graph.
1015 
1016 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1017 @*/
1018 PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1019 {
1020   PetscFunctionBegin;
1021   PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1022   PetscFunctionReturn(PETSC_SUCCESS);
1023 }
1024 
1025 /*@
1026   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1027 
1028   Logically Collective
1029 
1030   Input Parameters:
1031 + B      - the matrix
1032 . i      - the indices into `j` for the start of each row
1033 . j      - the column indices for each row (sorted for each row).
1034            The indices in `i` and `j` start with zero (NOT with one).
1035 - values - [use `NULL` if not provided] edge weights
1036 
1037   Level: intermediate
1038 
1039   Notes:
1040   The indices in `i` and `j` start with zero (NOT with one).
1041 
1042   You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1043   when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1044 
1045   You should not include the matrix diagonal elements.
1046 
1047   If you already have a matrix, you can create its adjacency matrix by a call
1048   to `MatConvert()`, specifying a type of `MATMPIADJ`.
1049 
1050   Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1051 
1052   Fortran Note:
1053   From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.
1054 
1055 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
1056 @*/
1057 PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1058 {
1059   PetscFunctionBegin;
1060   PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1061   PetscFunctionReturn(PETSC_SUCCESS);
1062 }
1063 
1064 /*@C
1065   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1066   The matrix need not have numerical values associated with it, it is
1067   intended for ordering (to reduce bandwidth etc) and partitioning.
1068 
1069   Collective
1070 
1071   Input Parameters:
1072 + comm   - MPI communicator
1073 . m      - number of local rows
1074 . N      - number of global columns
1075 . i      - the indices into `j` for the start of each row
1076 . j      - the column indices for each row (sorted for each row).
1077 - values - the values, optional, use `NULL` if not provided
1078 
1079   Output Parameter:
1080 . A - the matrix
1081 
1082   Level: intermediate
1083 
1084   Notes:
1085   The indices in `i` and `j` start with zero (NOT with one).
1086 
1087   You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1088   when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1089 
1090   You should not include the matrix diagonals.
1091 
1092   If you already have a matrix, you can create its adjacency matrix by a call
1093   to `MatConvert()`, specifying a type of `MATMPIADJ`.
1094 
1095   Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1096 
1097   Fortran Note:
1098   From Fortran the arrays `indices` and `values` must be retained by the user until `A` is destroyed
1099 
1100 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1101 @*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  /* create an empty matrix, fix its sizes and MATMPIADJ type, then hand over the CSR arrays */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  /* the matrix takes ownership of i, j, and values (see MatMPIAdjSetPreallocation()) */
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1111