xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 5d7a6ebe9dde080aedbe86be0085708de8f97bb7)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9   The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10 */
/*
  Extract the rows listed in irows (global indices, possibly owned by other ranks) from adj,
  keep only the columns listed in icols, and return the result in CSR form.

  Output parameters (each may be NULL if the caller does not want it):
    sadj_xadj   - row offsets (nlrows_is + 1 entries)
    sadj_adjncy - surviving column indices (global numbering)
    sadj_values - edge weights, set to NULL when adj carries no edge weights

  Ownership of every returned array passes to the caller (free with PetscFree()).
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested row, rooted on the rank that owns that row */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  /* lengths of the locally owned rows, to be served to whichever ranks request them */
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* phase 1: fetch the length and the owner-local starting offset of every requested row */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received row lengths into offsets into the entry buffers */
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  /* phase 2 graph: one leaf per requested nonzero, pointing at the owner's adjncy entry */
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* fetch edge weights alongside the column indices when the matrix carries them */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* drop columns not in icols: mark them -1 and shrink the per-row counts.
     NOTE(review): PetscFindInt is a binary search, so icols_indices is assumed sorted -- confirm callers pass sorted index sets */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand the arrays to the caller, freeing any the caller did not ask for */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
133 
/*
  Shared implementation behind MatCreateSubMatrices()/MatCreateSubMatricesMPI() for MATMPIADJ.
  For each pair (irow[i], icol[i]) it gathers the requested rows/columns via
  MatCreateSubMatrix_MPIAdj_data() and builds (or refills, for MAT_REUSE_MATRIX) an MPIAdj
  sub-matrix. With subcomm == PETSC_TRUE the result lives on the communicator of the index
  sets; otherwise every sub-matrix is sequential on PETSC_COMM_SELF.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory: the scratch array must hold the union
   * of row and column indices of any single sub-matrix
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* parallel case: both index sets must share one communicator, and it must not be PETSC_COMM_SELF */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data in CSR form; arrays are owned by us until handed to the Mat below*/
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* build the sorted, deduplicated union of row and column indices of this sub-matrix */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(indices + irow_n, icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns: map each global column index to its position in the union */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* MAT_REUSE_MATRIX: copy into the existing matrix's storage and release our arrays */
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix  must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
209 
210 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
211 {
212   /*get sub-matrices across a sub communicator */
213   PetscFunctionBegin;
214   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
215   PetscFunctionReturn(PETSC_SUCCESS);
216 }
217 
218 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
219 {
220   PetscFunctionBegin;
221   /*get sub-matrices based on PETSC_COMM_SELF */
222   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
223   PetscFunctionReturn(PETSC_SUCCESS);
224 }
225 
226 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
227 {
228   Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
229   PetscInt          i, j, m = A->rmap->n;
230   const char       *name;
231   PetscViewerFormat format;
232 
233   PetscFunctionBegin;
234   PetscCall(PetscObjectGetName((PetscObject)A, &name));
235   PetscCall(PetscViewerGetFormat(viewer, &format));
236   if (format == PETSC_VIEWER_ASCII_INFO) {
237     PetscFunctionReturn(PETSC_SUCCESS);
238   } else {
239     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
240     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
241     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
242     for (i = 0; i < m; i++) {
243       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
244       for (j = a->i[i]; j < a->i[i + 1]; j++) {
245         if (a->values) {
246           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
247         } else {
248           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
249         }
250       }
251       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
252     }
253     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
254     PetscCall(PetscViewerFlush(viewer));
255     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
256   }
257   PetscFunctionReturn(PETSC_SUCCESS);
258 }
259 
260 static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
261 {
262   PetscBool iascii;
263 
264   PetscFunctionBegin;
265   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
266   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
267   PetscFunctionReturn(PETSC_SUCCESS);
268 }
269 
270 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
271 {
272   Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;
273 
274   PetscFunctionBegin;
275   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
276   PetscCall(PetscFree(a->diag));
277   if (a->freeaij) {
278     if (a->freeaijwithfree) {
279       if (a->i) free(a->i);
280       if (a->j) free(a->j);
281     } else {
282       PetscCall(PetscFree(a->i));
283       PetscCall(PetscFree(a->j));
284       PetscCall(PetscFree(a->values));
285     }
286   }
287   PetscCall(PetscFree(a->rowvalues));
288   PetscCall(PetscFree(mat->data));
289   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
290   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
292   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
293   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
294   PetscFunctionReturn(PETSC_SUCCESS);
295 }
296 
297 static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
298 {
299   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
300 
301   PetscFunctionBegin;
302   switch (op) {
303   case MAT_SYMMETRIC:
304   case MAT_STRUCTURALLY_SYMMETRIC:
305   case MAT_HERMITIAN:
306   case MAT_SPD:
307     a->symmetric = flg;
308     break;
309   case MAT_SYMMETRY_ETERNAL:
310   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
311   case MAT_SPD_ETERNAL:
312     break;
313   default:
314     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
315     break;
316   }
317   PetscFunctionReturn(PETSC_SUCCESS);
318 }
319 
320 static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
321 {
322   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
323 
324   PetscFunctionBegin;
325   row -= A->rmap->rstart;
326   PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
327   *nz = a->i[row + 1] - a->i[row];
328   if (v) {
329     PetscInt j;
330     if (a->rowvalues_alloc < *nz) {
331       PetscCall(PetscFree(a->rowvalues));
332       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
333       PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
334     }
335     for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
336     *v = (*nz) ? a->rowvalues : NULL;
337   }
338   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
339   PetscFunctionReturn(PETSC_SUCCESS);
340 }
341 
/* MatGetRow_MPIAdj() hands out pointers into the matrix's own storage (and a persistent
   scratch buffer for values), so there is nothing to release here */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
347 
348 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
349 {
350   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
351   PetscBool   flag;
352 
353   PetscFunctionBegin;
354   /* If the  matrix dimensions are not equal,or no of nonzeros */
355   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
356 
357   /* if the a->i are the same */
358   PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
359 
360   /* if a->j are the same */
361   PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));
362 
363   PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
364   PetscFunctionReturn(PETSC_SUCCESS);
365 }
366 
367 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
368 {
369   PetscInt    i;
370   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
371   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
372 
373   PetscFunctionBegin;
374   *m    = A->rmap->n;
375   *ia   = a->i;
376   *ja   = a->j;
377   *done = PETSC_TRUE;
378   if (oshift) {
379     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
380     for (i = 0; i <= (*m); i++) (*ia)[i]++;
381   }
382   PetscFunctionReturn(PETSC_SUCCESS);
383 }
384 
385 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
386 {
387   PetscInt    i;
388   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
389   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
390 
391   PetscFunctionBegin;
392   PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
393   PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
394   if (oshift) {
395     PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument");
396     PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument");
397     for (i = 0; i <= (*m); i++) (*ia)[i]--;
398     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
399   }
400   PetscFunctionReturn(PETSC_SUCCESS);
401 }
402 
403 static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
404 {
405   Mat                B;
406   PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
407   const PetscInt    *rj;
408   const PetscScalar *ra;
409   MPI_Comm           comm;
410 
411   PetscFunctionBegin;
412   PetscCall(MatGetSize(A, NULL, &N));
413   PetscCall(MatGetLocalSize(A, &m, NULL));
414   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
415 
416   /* count the number of nonzeros per row */
417   for (i = 0; i < m; i++) {
418     PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
419     for (j = 0; j < len; j++) {
420       if (rj[j] == i + rstart) {
421         len--;
422         break;
423       } /* don't count diagonal */
424     }
425     nzeros += len;
426     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
427   }
428 
429   /* malloc space for nonzeros */
430   PetscCall(PetscMalloc1(nzeros + 1, &a));
431   PetscCall(PetscMalloc1(N + 1, &ia));
432   PetscCall(PetscMalloc1(nzeros + 1, &ja));
433 
434   nzeros = 0;
435   ia[0]  = 0;
436   for (i = 0; i < m; i++) {
437     PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
438     cnt = 0;
439     for (j = 0; j < len; j++) {
440       if (rj[j] != i + rstart) { /* if not diagonal */
441         a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
442         ja[nzeros + cnt++] = rj[j];
443       }
444     }
445     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
446     nzeros += cnt;
447     ia[i + 1] = nzeros;
448   }
449 
450   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
451   PetscCall(MatCreate(comm, &B));
452   PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
453   PetscCall(MatSetType(B, type));
454   PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
455 
456   if (reuse == MAT_INPLACE_MATRIX) {
457     PetscCall(MatHeaderReplace(A, &B));
458   } else {
459     *newmat = B;
460   }
461   PetscFunctionReturn(PETSC_SUCCESS);
462 }
463 
/*
  Insert entries into an MPIAdj matrix being built incrementally. Only the nonzero pattern
  is recorded (in a hash set of (row,col) pairs); numerical values are ignored. Rows owned
  by other ranks are stashed and delivered during assembly. Negative row/column indices and
  diagonal entries are skipped.
*/
static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  /* once adj->i exists the CSR structure is frozen; no further insertions allowed */
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* first call: lazily create the pattern hash set and the stash for off-process rows */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue; /* negative row index: silently ignored (PETSc convention) */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* off-process row: stash it; it is delivered to the owner during assembly */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip negative columns and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
496 
497 static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
498 {
499   PetscInt    nstash, reallocs;
500   Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
501 
502   PetscFunctionBegin;
503   if (!adj->ht) {
504     PetscCall(PetscHSetIJCreate(&adj->ht));
505     PetscCall(PetscLayoutSetUp(A->rmap));
506     PetscCall(PetscLayoutSetUp(A->cmap));
507   }
508   PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
509   PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
510   PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
511   PetscFunctionReturn(PETSC_SUCCESS);
512 }
513 
/*
  Finish assembly: drain the stash of entries set by other ranks, then convert the hash set
  of (row,col) pairs into sorted CSR arrays (adj->i / adj->j) owned by the matrix.
*/
static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* drain the stash: entries other ranks set for our rows arrive in messages here */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* count entries per local row, then prefix-sum into row offsets */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  /* scatter the hashed pairs into the column array; rowstarts[] doubles as per-row cursors */
  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  /* NOTE(review): adj->ht still holds the destroyed handle after this -- presumably never
     used again once adj->i is set (MatSetValues_MPIAdj errors out first); verify */
  PetscCall(PetscHSetIJDestroy(&ht));
  /* each cursor advanced by one row's worth of entries; shift back to recover the offsets */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  /* column indices within each row must be sorted for the adjacency format */
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE; /* the matrix now owns i/j and frees them in MatDestroy */
  PetscFunctionReturn(PETSC_SUCCESS);
}
576 
/* Function dispatch table for MATMPIADJ; slot numbers follow the _MatOps layout in petsc's
   matimpl.h. Unimplemented operations are NULL and produce "not supported" errors. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL};
729 
730 static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
731 {
732   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
733   PetscBool   useedgeweights;
734 
735   PetscFunctionBegin;
736   PetscCall(PetscLayoutSetUp(B->rmap));
737   PetscCall(PetscLayoutSetUp(B->cmap));
738   if (values) useedgeweights = PETSC_TRUE;
739   else useedgeweights = PETSC_FALSE;
740   /* Make everybody knows if they are using edge weights or not */
741   PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));
742 
743   if (PetscDefined(USE_DEBUG)) {
744     PetscInt ii;
745 
746     PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
747     for (ii = 1; ii < B->rmap->n; ii++) {
748       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
749     }
750     for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
751   }
752   b->j      = j;
753   b->i      = i;
754   b->values = values;
755 
756   b->nz        = i[B->rmap->n];
757   b->diag      = NULL;
758   b->symmetric = PETSC_FALSE;
759   b->freeaij   = PETSC_TRUE;
760 
761   B->ops->assemblybegin = NULL;
762   B->ops->assemblyend   = NULL;
763   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
764   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
765   PetscCall(MatStashDestroy_Private(&B->stash));
766   PetscFunctionReturn(PETSC_SUCCESS);
767 }
768 
769 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
770 {
771   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
772   const PetscInt *ranges;
773   MPI_Comm        acomm, bcomm;
774   MPI_Group       agroup, bgroup;
775   PetscMPIInt     i, rank, size, nranks, *ranks;
776 
777   PetscFunctionBegin;
778   *B = NULL;
779   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
780   PetscCallMPI(MPI_Comm_size(acomm, &size));
781   PetscCallMPI(MPI_Comm_size(acomm, &rank));
782   PetscCall(MatGetOwnershipRanges(A, &ranges));
783   for (i = 0, nranks = 0; i < size; i++) {
784     if (ranges[i + 1] - ranges[i] > 0) nranks++;
785   }
786   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
787     PetscCall(PetscObjectReference((PetscObject)A));
788     *B = A;
789     PetscFunctionReturn(PETSC_SUCCESS);
790   }
791 
792   PetscCall(PetscMalloc1(nranks, &ranks));
793   for (i = 0, nranks = 0; i < size; i++) {
794     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
795   }
796   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
797   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
798   PetscCall(PetscFree(ranks));
799   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
800   PetscCallMPI(MPI_Group_free(&agroup));
801   PetscCallMPI(MPI_Group_free(&bgroup));
802   if (bcomm != MPI_COMM_NULL) {
803     PetscInt    m, N;
804     Mat_MPIAdj *b;
805     PetscCall(MatGetLocalSize(A, &m, NULL));
806     PetscCall(MatGetSize(A, NULL, &N));
807     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
808     b          = (Mat_MPIAdj *)(*B)->data;
809     b->freeaij = PETSC_FALSE;
810     PetscCallMPI(MPI_Comm_free(&bcomm));
811   }
812   PetscFunctionReturn(PETSC_SUCCESS);
813 }
814 
/*
  Gathers a parallel MATMPIADJ matrix so that EVERY process ends up with the complete matrix
  (stored as a one-process MATMPIADJ on PETSC_COMM_SELF).

  The j[] (and optional values[]) arrays are concatenated with MPI_Allgatherv; the local row
  pointers i[] are temporarily shifted by this rank's global nonzero offset so that, once
  concatenated, they form a valid global CSR row-pointer array, then shifted back.
*/
static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); /* NZ = total global nonzeros */

  /* gather each rank's nonzero count, then build the receive displacements for the variable-length gathers */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) { /* edge weights are optional */
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  /* nzstart = nonzeros on all lower-ranked processes (exclusive prefix sum via inclusive scan minus own count) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ; /* closing row pointer completes the global CSR structure */
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
860 
/*
  Gathers a parallel MATMPIADJ matrix so that ONLY rank 0 ends up with the complete matrix
  (stored as a one-process MATMPIADJ on PETSC_COMM_SELF); *B is set only on rank 0.

  Same scheme as MatMPIAdjToSeq_MPIAdj but using rooted MPI_Gather/MPI_Gatherv, so only one
  copy of the assembled matrix exists.  Note that root and non-root branches must issue the
  same sequence of collective calls.
*/
static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A))); /* NZ = total global nonzeros */

  /* gather per-rank nonzero counts on the root only; non-roots pass NULL recv arguments */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) { /* edge weights are optional */
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    /* NOTE(review): values is gathered only on ranks where adj->values is non-NULL; presumably
       all ranks agree on whether weights are present (see useedgeweights) — otherwise the
       rooted collectives would mismatch */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* nzstart = nonzeros on all lower-ranked processes (exclusive prefix sum) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ; /* closing row pointer completes the global CSR structure */
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
917 
918 /*@
919   MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
920 
921   Collective
922 
923   Input Parameter:
924 . A - original `MATMPIADJ` matrix
925 
926   Output Parameter:
927 . B - matrix on subcommunicator, `NULL` on ranks that owned zero rows of `A`
928 
929   Level: developer
930 
931   Note:
932   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
933 
934   The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
935 
936 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
937 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  /* dispatch to the implementation composed on A; errors if the matrix type does not provide one */
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
945 
946 /*MC
947    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
948    intended for use constructing orderings and partitionings.
949 
950   Level: beginner
951 
952   Note:
953     You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
954     by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
955 
956 .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
957 M*/
/* Type constructor for MATMPIADJ: allocates the per-matrix data, installs the function table,
   and composes the type-specific methods dispatched to by the public MatMPIAdj* wrappers. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data         = (void *)b;
  B->ops[0]       = MatOps_Values; /* copy the whole MATMPIADJ operation table into the matrix */
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
976 
977 /*@C
  MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)
979 
980   Logically Collective
981 
982   Input Parameter:
983 . A - the matrix
984 
985   Output Parameter:
986 . B - the same matrix on all processes
987 
988   Level: intermediate
989 
990 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
991 @*/
PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
{
  PetscFunctionBegin;
  /* dispatch to the implementation composed on A; errors if the matrix type does not provide one */
  PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
998 
999 /*@C
  MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)
1001 
1002   Logically Collective
1003 
1004   Input Parameter:
1005 . A - the matrix
1006 
1007   Output Parameter:
1008 . B - the same matrix on rank zero, not set on other ranks
1009 
1010   Level: intermediate
1011 
  Note:
  This routine has an advantage on systems with multiple ranks per node, since only one copy of the matrix
  is stored on the first node instead of one copy per rank. This allows much larger parallel graphs to be
  partitioned sequentially.
1016 
1017 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1018 @*/
PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
{
  PetscFunctionBegin;
  /* dispatch to the implementation composed on A; errors if the matrix type does not provide one */
  PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1025 
1026 /*@C
1027   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1028 
1029   Logically Collective
1030 
1031   Input Parameters:
1032 + B      - the matrix
1033 . i      - the indices into `j` for the start of each row
1034 . j      - the column indices for each row (sorted for each row).
1035        The indices in `i` and `j` start with zero (NOT with one).
1036 - values - [use `NULL` if not provided] edge weights
1037 
1038   Level: intermediate
1039 
1040 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
1041 @*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
{
  PetscFunctionBegin;
  /* Try (not Use): silently a no-op for matrix types that do not compose this method */
  PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1048 
1049 /*@C
1050   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1051   The matrix does not have numerical values associated with it, but is
1052   intended for ordering (to reduce bandwidth etc) and partitioning.
1053 
1054   Collective
1055 
1056   Input Parameters:
1057 + comm   - MPI communicator
1058 . m      - number of local rows
1059 . N      - number of global columns
1060 . i      - the indices into `j` for the start of each row
1061 . j      - the column indices for each row (sorted for each row).
1062 - values - the values
1063 
1064   Output Parameter:
1065 . A - the matrix
1066 
1067   Level: intermediate
1068 
1069   Notes:
1070   The indices in `i` and `j` start with zero (NOT with one).
1071 
1072   You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1073   when the matrix is destroyed; you must allocate them with `PetscMalloc()`. If you
1074   call from Fortran you need not create the arrays with `PetscMalloc()`.
1075 
1076   You should not include the matrix diagonals.
1077 
1078   If you already have a matrix, you can create its adjacency matrix by a call
1079   to `MatConvert()`, specifying a type of `MATMPIADJ`.
1080 
1081   Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1082 
1083 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1084 @*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  /* standard create/set-sizes/set-type sequence, then hand the CSR arrays to the matrix
     (ownership of i, j, values transfers to the matrix; see the manual page above) */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1094