xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 21e3ffae2f3b73c0bd738cf6d0a809700fc04bb0)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
   MatCreateSubMatrix_MPIAdj_data - Extracts the CSR data (row pointers, column indices,
   and optionally edge weights) of the sub-matrix of adj selected by the row IS irows and
   the column IS icols.  Requested rows may be owned by other ranks, so the data is pulled
   over with two PetscSF broadcasts: the first fetches each row's length and offset on the
   owning rank, the second fetches the actual column indices (and weights).

   The output arrays sadj_xadj, sadj_adjncy, sadj_values are allocated here and ownership
   passes to the caller; any of the three output pointers may be NULL if that array is not
   wanted.  Shared by MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices
   (sequential sub-matrices).
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
{
  PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
  PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
  PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
  const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
  PetscMPIInt     owner;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
  PetscCall(MatGetLayouts(adj, &rmap, NULL));
  PetscCall(ISGetLocalSize(irows, &nlrows_is));
  PetscCall(ISGetIndices(irows, &irows_indices));
  PetscCall(PetscMalloc1(nlrows_is, &iremote));
  /* construct sf graph: one leaf per requested row, rooted at the rank that owns that row */
  nleaves = nlrows_is;
  for (i = 0; i < nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
  nroots = nlrows_mat;
  /* per-row nonzero counts on the owning side, to be broadcast to the requesters */
  for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
  PetscCall(PetscSFCreate(comm, &sf));
  /* PETSC_OWN_POINTER: the SF takes ownership of iremote and frees it on destroy */
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch each requested row's length (ncols) and its offset (xadj) on the owner */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* build local offsets for the incoming rows and total the entries to receive */
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    Ncols_recv += ncols_recv[i];
    ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
  /* second SF: one leaf per incoming nonzero, rooted at its position in the owner's adjncy[] */
  PetscCall(PetscCalloc1(Ncols_recv, &iremote));
  PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i = 0; i < nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
    for (j = 0; j < ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i] + j;
    }
  }
  PetscCall(ISRestoreIndices(irows, &irows_indices));
  /* edge weights travel alongside the column indices when present */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm, &sf));
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
  PetscCall(ISGetLocalSize(icols, &icols_n));
  PetscCall(ISGetIndices(icols, &icols_indices));
  /* drop entries whose column is not in icols: mark them -1 and count the survivors.
     NOTE(review): PetscFindInt is a binary search, so icols must be sorted -- presumably
     guaranteed by the callers; confirm */
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols, &icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos, &sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
  PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
  rnclos = 0;
  for (i = 0; i < nlrows_is; i++) {
    for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
  /* hand the arrays to the caller, or free them if that output was not requested */
  if (sadj_xadj) {
    *sadj_xadj = sxadj;
  } else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) {
    *sadj_adjncy = sadjncy;
  } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues;
    else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
133 
/*
   MatCreateSubMatrices_MPIAdj_Private - Builds n sub-matrices of mat, one per
   (irow[i], icol[i]) pair.  When subcomm is PETSC_TRUE each sub-matrix lives on the
   communicator of its index sets (which must match and must not be PETSC_COMM_SELF);
   otherwise every sub-matrix is sequential on PETSC_COMM_SELF.  With MAT_REUSE_MATRIX
   the CSR data of the existing sub-matrices is overwritten in place.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
{
  PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
  PetscInt       *indices, nindx, j, k, loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices, *icol_indices;
  MPI_Comm        scomm_row, scomm_col, scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i = 0; i < n; i++) {
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
  }
  PetscCall(PetscMalloc1(nindx, &indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i = 0; i < n; i++) {
    if (subcomm) {
      /* parallel sub-matrix: row and column IS must share a non-self communicator */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
      PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, " can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj   = NULL;
    sadjncy = NULL;
    svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
    PetscCall(ISGetLocalSize(irow[i], &irow_n));
    PetscCall(ISGetLocalSize(icol[i], &icol_n));
    /* merge the row and column indices into one sorted, duplicate-free list used as the
       global-to-local renumbering table for the sub-matrix columns */
    PetscCall(ISGetIndices(irow[i], &irow_indices));
    PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
    PetscCall(ISRestoreIndices(irow[i], &irow_indices));
    PetscCall(ISGetIndices(icol[i], &icol_indices));
    PetscCall(PetscArraycpy(indices + irow_n, icol_indices, icol_n));
    PetscCall(ISRestoreIndices(icol[i], &icol_indices));
    nindx = irow_n + icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
    /* renumber columns */
    for (j = 0; j < irow_n; j++) {
      for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
        PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "can not find col %" PetscInt_FMT, sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
    } else {
      /* reuse: copy the fresh CSR data into the existing sub-matrix, then free our copies */
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
      PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
      PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
      PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix  must have the same comm as the col index set");
      PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
      PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
      if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
      PetscCall(PetscFree(sxadj));
      PetscCall(PetscFree(sadjncy));
      PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
209 
/* MatCreateSubMatricesMPI implementation for MATMPIADJ: each sub-matrix is parallel,
   living on the (sub)communicator of its index sets */
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  /*get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
217 
/* MatCreateSubMatrices implementation for MATMPIADJ: each sub-matrix is sequential
   on PETSC_COMM_SELF */
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
{
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
225 
226 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
227 {
228   Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
229   PetscInt          i, j, m = A->rmap->n;
230   const char       *name;
231   PetscViewerFormat format;
232 
233   PetscFunctionBegin;
234   PetscCall(PetscObjectGetName((PetscObject)A, &name));
235   PetscCall(PetscViewerGetFormat(viewer, &format));
236   if (format == PETSC_VIEWER_ASCII_INFO) {
237     PetscFunctionReturn(PETSC_SUCCESS);
238   } else {
239     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
240     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
241     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
242     for (i = 0; i < m; i++) {
243       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
244       for (j = a->i[i]; j < a->i[i + 1]; j++) {
245         if (a->values) {
246           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
247         } else {
248           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
249         }
250       }
251       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
252     }
253     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
254     PetscCall(PetscViewerFlush(viewer));
255     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
256   }
257   PetscFunctionReturn(PETSC_SUCCESS);
258 }
259 
260 static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
261 {
262   PetscBool iascii;
263 
264   PetscFunctionBegin;
265   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
266   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
267   PetscFunctionReturn(PETSC_SUCCESS);
268 }
269 
/* Releases all storage owned by a MATMPIADJ matrix, honoring how the CSR arrays were
   allocated (PetscMalloc vs. system malloc from an external package). */
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays were obtained with system malloc(), so they must be released with free() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
      /* NOTE(review): a->values is not released on this path -- presumably callers that
         supply malloc()'d arrays never set values; confirm there is no leak */
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  /* detach composed methods so no dangling function pointers remain on the object */
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
298 
299 static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
300 {
301   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
302 
303   PetscFunctionBegin;
304   switch (op) {
305   case MAT_SYMMETRIC:
306   case MAT_STRUCTURALLY_SYMMETRIC:
307   case MAT_HERMITIAN:
308   case MAT_SPD:
309     a->symmetric = flg;
310     break;
311   case MAT_SYMMETRY_ETERNAL:
312   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
313   case MAT_SPD_ETERNAL:
314     break;
315   default:
316     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
317     break;
318   }
319   PetscFunctionReturn(PETSC_SUCCESS);
320 }
321 
322 static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
323 {
324   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
325 
326   PetscFunctionBegin;
327   row -= A->rmap->rstart;
328   PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
329   *nz = a->i[row + 1] - a->i[row];
330   if (v) {
331     PetscInt j;
332     if (a->rowvalues_alloc < *nz) {
333       PetscCall(PetscFree(a->rowvalues));
334       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
335       PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
336     }
337     for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
338     *v = (*nz) ? a->rowvalues : NULL;
339   }
340   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
341   PetscFunctionReturn(PETSC_SUCCESS);
342 }
343 
/* MatRestoreRow for MATMPIADJ: intentionally a no-op -- MatGetRow_MPIAdj hands out
   pointers into persistent matrix storage, so there is nothing to release. */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
349 
350 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
351 {
352   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
353   PetscBool   flag;
354 
355   PetscFunctionBegin;
356   /* If the  matrix dimensions are not equal,or no of nonzeros */
357   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
358 
359   /* if the a->i are the same */
360   PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));
361 
362   /* if a->j are the same */
363   PetscCall(PetscMemcmp(a->j, b->j, (a->nz) * sizeof(PetscInt), &flag));
364 
365   PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
366   PetscFunctionReturn(PETSC_SUCCESS);
367 }
368 
369 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
370 {
371   PetscInt    i;
372   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
373   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
374 
375   PetscFunctionBegin;
376   *m    = A->rmap->n;
377   *ia   = a->i;
378   *ja   = a->j;
379   *done = PETSC_TRUE;
380   if (oshift) {
381     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
382     for (i = 0; i <= (*m); i++) (*ia)[i]++;
383   }
384   PetscFunctionReturn(PETSC_SUCCESS);
385 }
386 
387 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
388 {
389   PetscInt    i;
390   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
391   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
392 
393   PetscFunctionBegin;
394   PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
395   PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
396   if (oshift) {
397     PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inia[] argument");
398     PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift then you must passed in inja[] argument");
399     for (i = 0; i <= (*m); i++) (*ia)[i]--;
400     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
401   }
402   PetscFunctionReturn(PETSC_SUCCESS);
403 }
404 
/*
   MatConvertFrom_MPIAdj - Converts an arbitrary matrix A into a MATMPIADJ adjacency
   matrix.  Diagonal entries are dropped; the absolute value of each off-diagonal entry
   is truncated to PetscInt and stored as the edge weight.  Two passes over the rows:
   the first counts the off-diagonal nonzeros, the second fills the CSR arrays, whose
   ownership then passes to the new matrix via MatMPIAdjSetPreallocation.
*/
PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                B;
  PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, NULL, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

  /* count the number of nonzeros per row */
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
    for (j = 0; j < len; j++) {
      if (rj[j] == i + rstart) {
        len--;
        break;
      } /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros + 1, &a));
  /* NOTE(review): ia needs m+1 entries but N+1 are allocated -- presumably a
     conservative over-allocation (square adjacency matrices); confirm intent */
  PetscCall(PetscMalloc1(N + 1, &ia));
  PetscCall(PetscMalloc1(nzeros + 1, &ja));

  /* second pass: fill the CSR arrays, again skipping diagonal entries */
  nzeros = 0;
  ia[0]  = 0;
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
    cnt = 0;
    for (j = 0; j < len; j++) {
      if (rj[j] != i + rstart) { /* if not diagonal */
        a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
        ja[nzeros + cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
    nzeros += cnt;
    ia[i + 1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(B, type));
  /* B takes ownership of ia, ja and a */
  PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
465 
/*
   MatSetValues_MPIAdj - Records nonzero positions (values are ignored; only the
   sparsity pattern matters for an adjacency matrix).  Locally owned entries go into a
   hash set of (row, col) keys; off-process rows are stashed and delivered during
   assembly.  Negative row/column indices and diagonal entries are skipped.  Must be
   called before assembly: once adj->i exists the pattern is frozen.
*/
PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
  PetscInt    rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazy first-call setup: create the hash set, the stash, and the layouts */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue; /* negative rows are ignored by convention */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* row owned by another rank: stash the whole row for delivery at assembly */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip negative cols and the diagonal */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
498 
/*
   MatAssemblyBegin_MPIAdj - Starts communicating stashed off-process entries to their
   owning ranks.  Also creates the hash set and layouts in case MatSetValues was never
   called on this rank (every rank must still participate in the scatter).
*/
PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt    nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
515 
/*
   MatAssemblyEnd_MPIAdj - Finishes assembly: drains the stash (feeding received entries
   back through MatSetValues so they land in the hash set), then converts the hash set
   into sorted CSR arrays (adj->i, adj->j).  After this the sparsity pattern is frozen.
*/
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar   *val;
  PetscInt      *row, *col, m, rstart, *rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* drain the stash: insert every received off-process entry locally */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* pass 1: count entries per row, then prefix-sum into provisional row starts */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
  PetscCall(PetscCalloc1(m + 1, &rowstarts));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht, hi);
  }
  for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

  /* pass 2: scatter the column indices into place; this advances rowstarts[i] to the
     start of row i+1, which is undone by the shift loop below */
  PetscCall(PetscHSetIJGetSize(ht, &nz));
  PetscCall(PetscMalloc1(nz, &col));
  PetscHashIterBegin(ht, hi);
  for (; !PetscHashIterAtEnd(ht, hi);) {
    PetscHashIterGetKey(ht, hi, key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht, hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* shift rowstarts back down by one slot to restore the true row-start offsets */
  for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
  rowstarts[0] = 0;

  /* hash iteration order is arbitrary, so sort each row's column indices */
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
578 
579 /* -------------------------------------------------------------------*/
/* Operation table for MATMPIADJ.  The numbered comments index into struct _MatOps;
   NULL entries are operations this matrix type does not implement. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                       /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                       /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                       /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                       /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                       /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*150*/ NULL,
                                       NULL};
732 
733 static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
734 {
735   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
736   PetscBool   useedgeweights;
737 
738   PetscFunctionBegin;
739   PetscCall(PetscLayoutSetUp(B->rmap));
740   PetscCall(PetscLayoutSetUp(B->cmap));
741   if (values) useedgeweights = PETSC_TRUE;
742   else useedgeweights = PETSC_FALSE;
743   /* Make everybody knows if they are using edge weights or not */
744   PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));
745 
746   if (PetscDefined(USE_DEBUG)) {
747     PetscInt ii;
748 
749     PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
750     for (ii = 1; ii < B->rmap->n; ii++) {
751       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
752     }
753     for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
754   }
755   b->j      = j;
756   b->i      = i;
757   b->values = values;
758 
759   b->nz        = i[B->rmap->n];
760   b->diag      = NULL;
761   b->symmetric = PETSC_FALSE;
762   b->freeaij   = PETSC_TRUE;
763 
764   B->ops->assemblybegin = NULL;
765   B->ops->assemblyend   = NULL;
766   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
767   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
768   PetscCall(MatStashDestroy_Private(&B->stash));
769   PetscFunctionReturn(PETSC_SUCCESS);
770 }
771 
772 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
773 {
774   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
775   const PetscInt *ranges;
776   MPI_Comm        acomm, bcomm;
777   MPI_Group       agroup, bgroup;
778   PetscMPIInt     i, rank, size, nranks, *ranks;
779 
780   PetscFunctionBegin;
781   *B = NULL;
782   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
783   PetscCallMPI(MPI_Comm_size(acomm, &size));
784   PetscCallMPI(MPI_Comm_size(acomm, &rank));
785   PetscCall(MatGetOwnershipRanges(A, &ranges));
786   for (i = 0, nranks = 0; i < size; i++) {
787     if (ranges[i + 1] - ranges[i] > 0) nranks++;
788   }
789   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
790     PetscCall(PetscObjectReference((PetscObject)A));
791     *B = A;
792     PetscFunctionReturn(PETSC_SUCCESS);
793   }
794 
795   PetscCall(PetscMalloc1(nranks, &ranks));
796   for (i = 0, nranks = 0; i < size; i++) {
797     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
798   }
799   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
800   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
801   PetscCall(PetscFree(ranks));
802   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
803   PetscCallMPI(MPI_Group_free(&agroup));
804   PetscCallMPI(MPI_Group_free(&bgroup));
805   if (bcomm != MPI_COMM_NULL) {
806     PetscInt    m, N;
807     Mat_MPIAdj *b;
808     PetscCall(MatGetLocalSize(A, &m, NULL));
809     PetscCall(MatGetSize(A, NULL, &N));
810     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
811     b          = (Mat_MPIAdj *)(*B)->data;
812     b->freeaij = PETSC_FALSE;
813     PetscCallMPI(MPI_Comm_free(&bcomm));
814   }
815   PetscFunctionReturn(PETSC_SUCCESS);
816 }
817 
/*
  Gathers a distributed MATMPIADJ matrix so that EVERY rank ends up with a complete
  sequential (PETSC_COMM_SELF) copy of the global adjacency structure, including the
  optional edge weights.  Needed by sequential partitioning packages.

  Note: adj->i is temporarily shifted by this rank's global nonzero offset so that the
  concatenation of all ranks' row offsets is globally consistent; the shift is undone
  before returning, so A is unchanged on exit.
*/
PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  /* sanity check: the local row-offset array must end exactly at the local nonzero count */
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = total number of nonzeros (edges) over all ranks */
  PetscCallMPI(MPI_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather each rank's nonzero count and build displacements for the variable-length gathers below */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0;
  for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
  if (adj->values) {
    /* edge weights are PetscInt, hence the MPIU_INT datatype */
    PetscCall(PetscMalloc1(NZ, &Values));
    PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ, &J));
  PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz, dispnz));
  /* nzstart = number of nonzeros on ranks before this one (inclusive scan minus own contribution) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M + 1, &II));
  PetscCall(PetscMPIIntCast(m, &mm));
  /* gather each rank's local row count and corresponding displacements */
  PetscCall(PetscMalloc2(size, &allm, size, &dispm));
  PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
  dispm[0] = 0;
  for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
  PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm, dispm));
  II[M] = NZ; /* close the global row-offset array */
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* the new sequential matrix takes ownership of II, J and Values */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
863 
/*
  Gathers a distributed MATMPIADJ matrix onto rank 0 only, producing a complete
  sequential (PETSC_COMM_SELF) copy of the global adjacency structure there;
  *B is not set on the other ranks.  Uses rooted MPI_Gather/MPI_Gatherv, so only
  one copy of the global graph is stored (cheaper than MatMPIAdjToSeq_MPIAdj()).

  All ranks must call this routine: the collectives are matched pairwise between the
  root branch and the non-root branch (receive-side arguments passed as NULL on
  non-root ranks are ignored by MPI).

  Note: adj->i is temporarily shifted by this rank's global nonzero offset and
  restored before returning, so A is unchanged on exit.
*/
PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
{
  PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
  PetscInt   *Values = NULL;
  Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
  PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatGetLocalSize(A, &m, NULL));
  nz = adj->nz;
  /* sanity check: the local row-offset array must end exactly at the local nonzero count */
  PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
  /* NZ = total number of nonzeros (edges) over all ranks */
  PetscCallMPI(MPI_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts on the root and build displacements for the Gatherv calls */
  PetscCall(PetscMPIIntCast(nz, &mnz));
  if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
  PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
  if (!rank) {
    dispnz[0] = 0;
    for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
    if (adj->values) {
      /* edge weights are PetscInt, hence the MPIU_INT datatype */
      PetscCall(PetscMalloc1(NZ, &Values));
      PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ, &J));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz, dispnz));
  } else {
    /* non-root ranks only send; receive-side arguments are ignored and may be NULL */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* nzstart = number of nonzeros on ranks before this one (inclusive scan minus own contribution) */
  PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i = 0; i < m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m, &mm));
  if (!rank) {
    /* gather per-rank row counts on the root, then the shifted row offsets themselves */
    PetscCall(PetscMalloc1(M + 1, &II));
    PetscCall(PetscMalloc2(size, &allm, size, &dispm));
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    dispm[0] = 0;
    for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm, dispm));
    II[M] = NZ; /* close the global row-offset array */
  } else {
    PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i = 0; i < m; i++) adj->i[i] -= nzstart;
  /* on rank 0 the new sequential matrix takes ownership of II, J and Values */
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
920 
921 /*@
922    MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
923 
924    Collective
925 
926    Input Parameter:
927 .  A - original MPIAdj matrix
928 
929    Output Parameter:
930 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
931 
932    Level: developer
933 
934    Note:
935    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
936 
937    The matrix B should be destroyed with `MatDestroy()`. The arrays are not copied, so B should be destroyed before A is destroyed.
938 
939 .seealso: `MATMPIADJ`, `MatCreateMPIAdj()`
940 @*/
/* Public entry point: validates the input and dispatches to the implementation composed
   on the object under "MatMPIAdjCreateNonemptySubcommMat_C" (errors if the matrix type
   did not provide one). */
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
  PetscFunctionReturn(PETSC_SUCCESS);
}
948 
949 /*MC
950    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
951    intended for use constructing orderings and partitionings.
952 
953   Level: beginner
954 
955   Note:
956     You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
957     by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
958 
959 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
960 M*/
961 
/* MATMPIADJ type constructor: allocates the type-specific data, installs the operation
   table, and composes the type-specific methods that the public wrappers dispatch to. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data = (void *)b;
  /* install the MATMPIADJ function table defined above */
  PetscCall(PetscMemcpy(B->ops, &MatOps_Values, sizeof(struct _MatOps)));
  B->assembled    = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  /* compose the type-specific implementations so PetscUseMethod()/PetscTryMethod() can find them */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
980 
981 /*@C
   MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioner)
983 
984    Logically Collective
985 
986    Input Parameter:
987 .  A - the matrix
988 
989    Output Parameter:
990 .  B - the same matrix on all processes
991 
992    Level: intermediate
993 
994 .seealso: `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
995 @*/
996 PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
997 {
998   PetscFunctionBegin;
999   PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
1000   PetscFunctionReturn(PETSC_SUCCESS);
1001 }
1002 
1003 /*@C
   MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioner)
1005 
1006    Logically Collective
1007 
1008    Input Parameter:
1009 .  A - the matrix
1010 
1011    Output Parameter:
1012 .  B - the same matrix on rank zero, not set on other ranks
1013 
1014    Level: intermediate
1015 
1016    Note:
     This routine is advantageous on systems with multiple ranks per node since only one copy of the matrix
     is stored on the first node, instead of one copy per rank. This can allow sequentially partitioning much
     larger parallel graphs.
1020 
1021 .seealso: `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1022 @*/
1023 PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1024 {
1025   PetscFunctionBegin;
1026   PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1027   PetscFunctionReturn(PETSC_SUCCESS);
1028 }
1029 
1030 /*@C
1031    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1032 
1033    Logically Collective
1034 
1035    Input Parameters:
1036 +  A - the matrix
1037 .  i - the indices into j for the start of each row
1038 .  j - the column indices for each row (sorted for each row).
1039        The indices in i and j start with zero (NOT with one).
1040 -  values - [optional] edge weights
1041 
1042    Level: intermediate
1043 
1044 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1045 @*/
1046 PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1047 {
1048   PetscFunctionBegin;
1049   PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1050   PetscFunctionReturn(PETSC_SUCCESS);
1051 }
1052 
1053 /*@C
1054    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1055    The matrix does not have numerical values associated with it, but is
1056    intended for ordering (to reduce bandwidth etc) and partitioning.
1057 
1058    Collective
1059 
1060    Input Parameters:
1061 +  comm - MPI communicator
1062 .  m - number of local rows
1063 .  N - number of global columns
1064 .  i - the indices into j for the start of each row
1065 .  j - the column indices for each row (sorted for each row).
1066        The indices in i and j start with zero (NOT with one).
1067 -  values -[optional] edge weights
1068 
1069    Output Parameter:
1070 .  A - the matrix
1071 
1072    Level: intermediate
1073 
1074    Notes:
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with `PetscMalloc()`. If you
   call from Fortran you need not create the arrays with `PetscMalloc()`.
1078 
1079    You should not include the matrix diagonals.
1080 
1081    If you already have a matrix, you can create its adjacency matrix by a call
1082    to `MatConvert()`, specifying a type of `MATMPIADJ`.
1083 
1084    Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1085 
1086 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1087 @*/
/* Convenience constructor: create, size, type, and preallocate a MATMPIADJ matrix in one call. */
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  /* row count m is local, column count N is global; the complementary sizes are derived */
  PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
  PetscCall(MatSetType(*A, MATMPIADJ));
  /* hands the CSR arrays over to the matrix, which takes ownership (see Notes above) */
  PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1097