xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision bf0c7fc2d7fb4e90d42e412b25194b878e6e442d)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
5 #include <petscsf.h>
6 
7 /*
8   The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
9 */
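/*
  Outline of the communication pattern used by MatCreateSubMatrix_MPIAdj_data():
    1. Build a PetscSF that maps each requested row in irows to its owning rank, and broadcast
       the row lengths (ncols) and row offsets (xadj) back to the requesting ranks.
    2. Build a second PetscSF with one leaf per requested nonzero and broadcast the column
       indices (and the edge weights, when they are in use).
    3. Drop received columns that are not in icols (marked with -1) and compact the rest into
       the CSR arrays sxadj, sadjncy, and svalues that are returned to the caller.
*/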
10 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
11 {
12   PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
13   PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
14   PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
15   const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
16   PetscMPIInt     owner;
17   Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
18   PetscLayout     rmap;
19   MPI_Comm        comm;
20   PetscSF         sf;
21   PetscSFNode    *iremote;
22   PetscBool       done;
23 
24   PetscFunctionBegin;
25   PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
26   PetscCall(MatGetLayouts(adj, &rmap, NULL));
27   PetscCall(ISGetLocalSize(irows, &nlrows_is));
28   PetscCall(ISGetIndices(irows, &irows_indices));
29   PetscCall(PetscMalloc1(nlrows_is, &iremote));
30   /* construct the SF graph that maps each requested row to its owning rank */
31   nleaves = nlrows_is;
32   for (i = 0; i < nlrows_is; i++) {
33     owner       = -1;
34     rlocalindex = -1;
35     PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
36     iremote[i].rank  = owner;
37     iremote[i].index = rlocalindex;
38   }
39   PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
40   PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
41   nroots = nlrows_mat;
42   for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
43   PetscCall(PetscSFCreate(comm, &sf));
44   PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
45   PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
46   PetscCall(PetscSFSetFromOptions(sf));
47   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
48   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
49   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
50   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
51   PetscCall(PetscSFDestroy(&sf));
52   Ncols_recv = 0;
53   for (i = 0; i < nlrows_is; i++) {
54     Ncols_recv += ncols_recv[i];
55     ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
56   }
57   Ncols_send = 0;
58   for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
59   PetscCall(PetscCalloc1(Ncols_recv, &iremote));
60   PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
61   nleaves    = Ncols_recv;
62   Ncols_recv = 0;
63   for (i = 0; i < nlrows_is; i++) {
64     PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
65     for (j = 0; j < ncols_recv[i]; j++) {
66       iremote[Ncols_recv].rank    = owner;
67       iremote[Ncols_recv++].index = xadj_recv[i] + j;
68     }
69   }
70   PetscCall(ISRestoreIndices(irows, &irows_indices));
71   /* allocate a buffer to receive the edge weights, if they are in use */
72   if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
73   nroots = Ncols_send;
74   PetscCall(PetscSFCreate(comm, &sf));
75   PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
76   PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
77   PetscCall(PetscSFSetFromOptions(sf));
78   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
79   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
80   if (a->useedgeweights) {
81     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
82     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
83   }
84   PetscCall(PetscSFDestroy(&sf));
85   PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
86   PetscCall(ISGetLocalSize(icols, &icols_n));
87   PetscCall(ISGetIndices(icols, &icols_indices));
88   rnclos = 0;
89   for (i = 0; i < nlrows_is; i++) {
90     for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
91       PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
92       if (loc < 0) {
93         adjncy_recv[j] = -1;
94         if (a->useedgeweights) values_recv[j] = -1;
95         ncols_recv[i]--;
96       } else {
97         rnclos++;
98       }
99     }
100   }
101   PetscCall(ISRestoreIndices(icols, &icols_indices));
102   PetscCall(PetscCalloc1(rnclos, &sadjncy));
103   if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
104   PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
105   rnclos = 0;
106   for (i = 0; i < nlrows_is; i++) {
107     for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
108       if (adjncy_recv[j] < 0) continue;
109       sadjncy[rnclos] = adjncy_recv[j];
110       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
111       rnclos++;
112     }
113   }
114   for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
115   if (sadj_xadj) {
116     *sadj_xadj = sxadj;
117   } else PetscCall(PetscFree(sxadj));
118   if (sadj_adjncy) {
119     *sadj_adjncy = sadjncy;
120   } else PetscCall(PetscFree(sadjncy));
121   if (sadj_values) {
122     if (a->useedgeweights) *sadj_values = svalues;
123     else *sadj_values = NULL;
124   } else {
125     if (a->useedgeweights) PetscCall(PetscFree(svalues));
126   }
127   PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
128   PetscCall(PetscFree(adjncy_recv));
129   if (a->useedgeweights) PetscCall(PetscFree(values_recv));
130   PetscFunctionReturn(PETSC_SUCCESS);
131 }
132 
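/*
  Extracts one sub-matrix per (irow[i], icol[i]) pair. For each pair the rows are gathered with
  MatCreateSubMatrix_MPIAdj_data(), the union of the row and column indices is sorted with
  duplicates removed, and the gathered column indices are renumbered into that local ordering
  before the MATMPIADJ sub-matrix is created (or refilled, when scall is not MAT_INITIAL_MATRIX).
*/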
133 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
134 {
135   PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
136   PetscInt       *indices, nindx, j, k, loc;
137   PetscMPIInt     issame;
138   const PetscInt *irow_indices, *icol_indices;
139   MPI_Comm        scomm_row, scomm_col, scomm_mat;
140 
141   PetscFunctionBegin;
142   nindx = 0;
143   /*
144    * Estimate an upper bound on the workspace needed for the merged row/column index list
145    */
146   for (i = 0; i < n; i++) {
147     PetscCall(ISGetLocalSize(irow[i], &irow_n));
148     PetscCall(ISGetLocalSize(icol[i], &icol_n));
149     nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
150   }
151   PetscCall(PetscMalloc1(nindx, &indices));
152   /* construct a submat */
153   //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));
154 
155   for (i = 0; i < n; i++) {
156     if (subcomm) {
157       PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
158       PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
159       PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
160       PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
161       PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
162       PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
163     } else {
164       scomm_row = PETSC_COMM_SELF;
165     }
166     /* get sub-matrix data */
167     sxadj   = NULL;
168     sadjncy = NULL;
169     svalues = NULL;
170     PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
171     PetscCall(ISGetLocalSize(irow[i], &irow_n));
172     PetscCall(ISGetLocalSize(icol[i], &icol_n));
173     PetscCall(ISGetIndices(irow[i], &irow_indices));
174     PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
175     PetscCall(ISRestoreIndices(irow[i], &irow_indices));
176     PetscCall(ISGetIndices(icol[i], &icol_indices));
177     PetscCall(PetscArraycpy(PetscSafePointerPlusOffset(indices, irow_n), icol_indices, icol_n));
178     PetscCall(ISRestoreIndices(icol[i], &icol_indices));
179     nindx = irow_n + icol_n;
180     PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
181     /* renumber columns */
182     for (j = 0; j < irow_n; j++) {
183       for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
184         PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
185         PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find col %" PetscInt_FMT, sadjncy[k]);
186         sadjncy[k] = loc;
187       }
188     }
189     if (scall == MAT_INITIAL_MATRIX) {
190       PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
191     } else {
192       Mat         sadj = *submat[i];
193       Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
194       PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
195       PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
196       PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the row index set");
197       PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
198       PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
199       if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
200       PetscCall(PetscFree(sxadj));
201       PetscCall(PetscFree(sadjncy));
202       PetscCall(PetscFree(svalues));
203     }
204   }
205   PetscCall(PetscFree(indices));
206   PetscFunctionReturn(PETSC_SUCCESS);
207 }
208 
209 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
210 {
211   /* get sub-matrices across a subcommunicator */
212   PetscFunctionBegin;
213   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
214   PetscFunctionReturn(PETSC_SUCCESS);
215 }
216 
217 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
218 {
219   PetscFunctionBegin;
220   /* get sequential sub-matrices, each on PETSC_COMM_SELF */
221   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
222   PetscFunctionReturn(PETSC_SUCCESS);
223 }
224 
225 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
226 {
227   Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
228   PetscInt          i, j, m = A->rmap->n;
229   const char       *name;
230   PetscViewerFormat format;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscObjectGetName((PetscObject)A, &name));
234   PetscCall(PetscViewerGetFormat(viewer, &format));
235   if (format == PETSC_VIEWER_ASCII_INFO) {
236     PetscFunctionReturn(PETSC_SUCCESS);
237   } else {
238     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
239     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
240     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241     for (i = 0; i < m; i++) {
242       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
243       for (j = a->i[i]; j < a->i[i + 1]; j++) {
244         if (a->values) {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
246         } else {
247           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
248         }
249       }
250       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
251     }
252     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
253     PetscCall(PetscViewerFlush(viewer));
254     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255   }
256   PetscFunctionReturn(PETSC_SUCCESS);
257 }
258 
259 static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
260 {
261   PetscBool isascii;
262 
263   PetscFunctionBegin;
264   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
265   if (isascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
266   PetscFunctionReturn(PETSC_SUCCESS);
267 }
268 
269 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
270 {
271   Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;
272 
273   PetscFunctionBegin;
274   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
275   PetscCall(PetscFree(a->diag));
276   if (a->freeaij) {
277     if (a->freeaijwithfree) {
278       if (a->i) free(a->i);
279       if (a->j) free(a->j);
280     } else {
281       PetscCall(PetscFree(a->i));
282       PetscCall(PetscFree(a->j));
283       PetscCall(PetscFree(a->values));
284     }
285   }
286   PetscCall(PetscFree(a->rowvalues));
287   PetscCall(PetscFree(mat->data));
288   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
289   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
290   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
292   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
293   PetscFunctionReturn(PETSC_SUCCESS);
294 }
295 
296 static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
297 {
298   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
299 
300   PetscFunctionBegin;
301   switch (op) {
302   case MAT_SYMMETRIC:
303   case MAT_STRUCTURALLY_SYMMETRIC:
304   case MAT_HERMITIAN:
305   case MAT_SPD:
306     a->symmetric = flg;
307     break;
308   default:
309     break;
310   }
311   PetscFunctionReturn(PETSC_SUCCESS);
312 }
313 
314 static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
315 {
316   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
317 
318   PetscFunctionBegin;
319   row -= A->rmap->rstart;
320   PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
321   *nz = a->i[row + 1] - a->i[row];
322   if (v) {
323     PetscInt j;
324     if (a->rowvalues_alloc < *nz) {
325       PetscCall(PetscFree(a->rowvalues));
326       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
327       PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
328     }
329     for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
330     *v = (*nz) ? a->rowvalues : NULL;
331   }
332   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
333   PetscFunctionReturn(PETSC_SUCCESS);
334 }
335 
336 static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
337 {
338   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
339   PetscBool   flag;
340 
341   PetscFunctionBegin;
342   /* the matrices differ if their local dimensions or numbers of nonzeros differ */
343   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
344     flag = PETSC_FALSE;
345   } else {
346     PetscBool eqi, eqj;
347     PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &eqi)); /* compare the row offsets */
348     PetscCall(PetscArraycmp(a->j, b->j, a->nz, &eqj));          /* compare the column indices */
349     flag = (PetscBool)(eqi && eqj);
350   }
351   PetscCallMPI(MPIU_Allreduce(&flag, flg, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
352   PetscFunctionReturn(PETSC_SUCCESS);
353 }
354 
355 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
356 {
357   PetscInt    i;
358   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
359   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
360 
361   PetscFunctionBegin;
362   *m    = A->rmap->n;
363   *ia   = a->i;
364   *ja   = a->j;
365   *done = PETSC_TRUE;
366   if (oshift) {
367     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
368     for (i = 0; i <= (*m); i++) (*ia)[i]++;
369   }
370   PetscFunctionReturn(PETSC_SUCCESS);
371 }
372 
373 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
374 {
375   PetscInt    i;
376   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
377   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;
378 
379   PetscFunctionBegin;
380   PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
381   PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
382   if (oshift) {
383     PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inia[] argument");
384     PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is nonzero you must pass in the inja[] argument");
385     for (i = 0; i <= (*m); i++) (*ia)[i]--;
386     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
387   }
388   PetscFunctionReturn(PETSC_SUCCESS);
389 }
390 
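/*
  Builds a matrix of the requested type (normally MATMPIADJ) from any matrix that supports
  MatGetRow(): diagonal entries are skipped, and the absolute value of each remaining entry,
  truncated to PetscInt, becomes the edge weight.
*/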
391 static PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
392 {
393   Mat                B;
394   PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
395   const PetscInt    *rj;
396   const PetscScalar *ra;
397   MPI_Comm           comm;
398 
399   PetscFunctionBegin;
400   PetscCall(MatGetSize(A, NULL, &N));
401   PetscCall(MatGetLocalSize(A, &m, NULL));
402   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
403 
404   /* count the number of nonzeros per row */
405   for (i = 0; i < m; i++) {
406     PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
407     for (j = 0; j < len; j++) {
408       if (rj[j] == i + rstart) {
409         len--;
410         break;
411       } /* don't count diagonal */
412     }
413     nzeros += len;
414     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
415   }
416 
417   /* malloc space for nonzeros */
418   PetscCall(PetscMalloc1(nzeros + 1, &a));
419   PetscCall(PetscMalloc1(N + 1, &ia));
420   PetscCall(PetscMalloc1(nzeros + 1, &ja));
421 
422   nzeros = 0;
423   ia[0]  = 0;
424   for (i = 0; i < m; i++) {
425     PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
426     cnt = 0;
427     for (j = 0; j < len; j++) {
428       if (rj[j] != i + rstart) { /* if not diagonal */
429         a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
430         ja[nzeros + cnt++] = rj[j];
431       }
432     }
433     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
434     nzeros += cnt;
435     ia[i + 1] = nzeros;
436   }
437 
438   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
439   PetscCall(MatCreate(comm, &B));
440   PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
441   PetscCall(MatSetType(B, type));
442   PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));
443 
444   if (reuse == MAT_INPLACE_MATRIX) {
445     PetscCall(MatHeaderReplace(A, &B));
446   } else {
447     *newmat = B;
448   }
449   PetscFunctionReturn(PETSC_SUCCESS);
450 }
451 
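/*
  MatSetValues() support: only the sparsity pattern is recorded. Locally owned entries are added
  to a hash set of (row, column) keys (negative indices and diagonal entries are skipped), while
  entries destined for other processes are placed in the matrix stash; the numerical values are
  not used. MatAssemblyEnd_MPIAdj() later turns the hash set into the CSR adjacency arrays.
*/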
452 static PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
453 {
454   Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
455   PetscInt    rStart, rEnd, cStart, cEnd;
456 
457   PetscFunctionBegin;
458   PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
459   PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
460   PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
461   if (!adj->ht) {
462     PetscCall(PetscHSetIJCreate(&adj->ht));
463     PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
464     PetscCall(PetscLayoutSetUp(A->rmap));
465     PetscCall(PetscLayoutSetUp(A->cmap));
466   }
467   for (PetscInt r = 0; r < m; ++r) {
468     PetscHashIJKey key;
469 
470     key.i = rows[r];
471     if (key.i < 0) continue;
472     if ((key.i < rStart) || (key.i >= rEnd)) {
473       PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
474     } else {
475       for (PetscInt c = 0; c < n; ++c) {
476         key.j = cols[c];
477         if (key.j < 0 || key.i == key.j) continue;
478         PetscCall(PetscHSetIJAdd(adj->ht, key));
479       }
480     }
481   }
482   PetscFunctionReturn(PETSC_SUCCESS);
483 }
484 
485 static PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
486 {
487   PetscInt    nstash, reallocs;
488   Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
489 
490   PetscFunctionBegin;
491   if (!adj->ht) {
492     PetscCall(PetscHSetIJCreate(&adj->ht));
493     PetscCall(PetscLayoutSetUp(A->rmap));
494     PetscCall(PetscLayoutSetUp(A->cmap));
495   }
496   PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
497   PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
498   PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
499   PetscFunctionReturn(PETSC_SUCCESS);
500 }
501 
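/*
  Completes the MatSetValues()-based assembly: receives the stashed off-process entries, counts
  the hash-set keys per row to build the row offsets, scatters the column indices into place,
  sorts each row, and installs the resulting CSR arrays as the adjacency structure of the matrix.
*/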
502 static PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
503 {
504   PetscScalar   *val;
505   PetscInt      *row, *col, m, rstart, *rowstarts;
506   PetscInt       i, j, ncols, flg, nz;
507   PetscMPIInt    n;
508   Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
509   PetscHashIter  hi;
510   PetscHashIJKey key;
511   PetscHSetIJ    ht = adj->ht;
512 
513   PetscFunctionBegin;
514   while (1) {
515     PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
516     if (!flg) break;
517 
518     for (i = 0; i < n;) {
519       /* Identify the consecutive vals belonging to the same row */
520       for (j = i, rstart = row[j]; j < n; j++) {
521         if (row[j] != rstart) break;
522       }
523       if (j < n) ncols = j - i;
524       else ncols = n - i;
525       /* Set all these values with a single function call */
526       PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
527       i = j;
528     }
529   }
530   PetscCall(MatStashScatterEnd_Private(&A->stash));
531   PetscCall(MatStashDestroy_Private(&A->stash));
532 
533   PetscCall(MatGetLocalSize(A, &m, NULL));
534   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
535   PetscCall(PetscCalloc1(m + 1, &rowstarts));
536   PetscHashIterBegin(ht, hi);
537   for (; !PetscHashIterAtEnd(ht, hi);) {
538     PetscHashIterGetKey(ht, hi, key);
539     rowstarts[key.i - rstart + 1]++;
540     PetscHashIterNext(ht, hi);
541   }
542   for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];
543 
544   PetscCall(PetscHSetIJGetSize(ht, &nz));
545   PetscCall(PetscMalloc1(nz, &col));
546   PetscHashIterBegin(ht, hi);
547   for (; !PetscHashIterAtEnd(ht, hi);) {
548     PetscHashIterGetKey(ht, hi, key);
549     col[rowstarts[key.i - rstart]++] = key.j;
550     PetscHashIterNext(ht, hi);
551   }
552   PetscCall(PetscHSetIJDestroy(&ht));
553   for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
554   rowstarts[0] = 0;
555 
556   for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));
557 
558   adj->i       = rowstarts;
559   adj->j       = col;
560   adj->nz      = rowstarts[m];
561   adj->freeaij = PETSC_TRUE;
562   PetscFunctionReturn(PETSC_SUCCESS);
563 }
564 
565 static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
566                                        MatGetRow_MPIAdj,
567                                        NULL,
568                                        NULL,
569                                        /* 4*/ NULL,
570                                        NULL,
571                                        NULL,
572                                        NULL,
573                                        NULL,
574                                        NULL,
575                                        /*10*/ NULL,
576                                        NULL,
577                                        NULL,
578                                        NULL,
579                                        NULL,
580                                        /*15*/ NULL,
581                                        MatEqual_MPIAdj,
582                                        NULL,
583                                        NULL,
584                                        NULL,
585                                        /*20*/ MatAssemblyBegin_MPIAdj,
586                                        MatAssemblyEnd_MPIAdj,
587                                        MatSetOption_MPIAdj,
588                                        NULL,
589                                        /*24*/ NULL,
590                                        NULL,
591                                        NULL,
592                                        NULL,
593                                        NULL,
594                                        /*29*/ NULL,
595                                        NULL,
596                                        NULL,
597                                        NULL,
598                                        NULL,
599                                        /*34*/ NULL,
600                                        NULL,
601                                        NULL,
602                                        NULL,
603                                        NULL,
604                                        /*39*/ NULL,
605                                        MatCreateSubMatrices_MPIAdj,
606                                        NULL,
607                                        NULL,
608                                        NULL,
609                                        /*44*/ NULL,
610                                        NULL,
611                                        MatShift_Basic,
612                                        NULL,
613                                        NULL,
614                                        /*49*/ NULL,
615                                        MatGetRowIJ_MPIAdj,
616                                        MatRestoreRowIJ_MPIAdj,
617                                        NULL,
618                                        NULL,
619                                        /*54*/ NULL,
620                                        NULL,
621                                        NULL,
622                                        NULL,
623                                        NULL,
624                                        /*59*/ NULL,
625                                        MatDestroy_MPIAdj,
626                                        MatView_MPIAdj,
627                                        MatConvertFrom_MPIAdj,
628                                        NULL,
629                                        /*64*/ NULL,
630                                        NULL,
631                                        NULL,
632                                        NULL,
633                                        NULL,
634                                        /*69*/ NULL,
635                                        NULL,
636                                        NULL,
637                                        NULL,
638                                        NULL,
639                                        /*74*/ NULL,
640                                        NULL,
641                                        NULL,
642                                        NULL,
643                                        NULL,
644                                        /*79*/ NULL,
645                                        NULL,
646                                        NULL,
647                                        NULL,
648                                        NULL,
649                                        /*84*/ NULL,
650                                        NULL,
651                                        NULL,
652                                        NULL,
653                                        NULL,
654                                        /*89*/ NULL,
655                                        NULL,
656                                        NULL,
657                                        NULL,
658                                        NULL,
659                                        /*94*/ NULL,
660                                        NULL,
661                                        NULL,
662                                        NULL,
663                                        NULL,
664                                        /*99*/ NULL,
665                                        NULL,
666                                        NULL,
667                                        NULL,
668                                        NULL,
669                                        /*104*/ NULL,
670                                        NULL,
671                                        NULL,
672                                        NULL,
673                                        NULL,
674                                        /*109*/ NULL,
675                                        NULL,
676                                        NULL,
677                                        NULL,
678                                        NULL,
679                                        /*114*/ NULL,
680                                        NULL,
681                                        NULL,
682                                        NULL,
683                                        MatCreateSubMatricesMPI_MPIAdj,
684                                        /*119*/ NULL,
685                                        NULL,
686                                        NULL,
687                                        NULL,
688                                        NULL,
689                                        /*124*/ NULL,
690                                        NULL,
691                                        NULL,
692                                        NULL,
693                                        NULL,
694                                        /*129*/ NULL,
695                                        NULL,
696                                        NULL,
697                                        NULL,
698                                        NULL,
699                                        /*134*/ NULL,
700                                        NULL,
701                                        NULL,
702                                        NULL,
703                                        NULL,
704                                        /*139*/ NULL,
705                                        NULL,
706                                        NULL,
707                                        NULL,
708                                        NULL};
709 
710 static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
711 {
712   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
713   PetscBool   useedgeweights;
714 
715   PetscFunctionBegin;
716   PetscCall(PetscLayoutSetUp(B->rmap));
717   PetscCall(PetscLayoutSetUp(B->cmap));
718   if (values) useedgeweights = PETSC_TRUE;
719   else useedgeweights = PETSC_FALSE;
720   /* make sure all processes agree on whether edge weights are being used */
721   PetscCallMPI(MPIU_Allreduce(&useedgeweights, &b->useedgeweights, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)B)));
722 
723   if (PetscDefined(USE_DEBUG)) {
724     PetscInt ii;
725 
726     PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
727     for (ii = 1; ii < B->rmap->n; ii++) {
728       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
729     }
730     for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
731   }
732   b->j      = j;
733   b->i      = i;
734   b->values = values;
735 
736   b->nz        = i[B->rmap->n];
737   b->diag      = NULL;
738   b->symmetric = PETSC_FALSE;
739   b->freeaij   = PETSC_TRUE;
740 
741   B->ops->assemblybegin = NULL;
742   B->ops->assemblyend   = NULL;
743   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
744   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
745   PetscCall(MatStashDestroy_Private(&B->stash));
746   PetscFunctionReturn(PETSC_SUCCESS);
747 }
748 
749 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
750 {
751   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
752   const PetscInt *ranges;
753   MPI_Comm        acomm, bcomm;
754   MPI_Group       agroup, bgroup;
755   PetscMPIInt     i, size, nranks, *ranks;
756 
757   PetscFunctionBegin;
758   *B = NULL;
759   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
760   PetscCallMPI(MPI_Comm_size(acomm, &size));
761   PetscCall(MatGetOwnershipRanges(A, &ranges));
762   for (i = 0, nranks = 0; i < size; i++) {
763     if (ranges[i + 1] - ranges[i] > 0) nranks++;
764   }
765   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
766     PetscCall(PetscObjectReference((PetscObject)A));
767     *B = A;
768     PetscFunctionReturn(PETSC_SUCCESS);
769   }
770 
771   PetscCall(PetscMalloc1(nranks, &ranks));
772   for (i = 0, nranks = 0; i < size; i++) {
773     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
774   }
775   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
776   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
777   PetscCall(PetscFree(ranks));
778   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
779   PetscCallMPI(MPI_Group_free(&agroup));
780   PetscCallMPI(MPI_Group_free(&bgroup));
781   if (bcomm != MPI_COMM_NULL) {
782     PetscInt    m, N;
783     Mat_MPIAdj *b;
784     PetscCall(MatGetLocalSize(A, &m, NULL));
785     PetscCall(MatGetSize(A, NULL, &N));
786     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
787     b          = (Mat_MPIAdj *)(*B)->data;
788     b->freeaij = PETSC_FALSE;
789     PetscCallMPI(MPI_Comm_free(&bcomm));
790   }
791   PetscFunctionReturn(PETSC_SUCCESS);
792 }
793 
794 static PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
795 {
796   PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
797   PetscInt   *Values = NULL;
798   Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
799   PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;
800 
801   PetscFunctionBegin;
802   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
803   PetscCall(MatGetSize(A, &M, &N));
804   PetscCall(MatGetLocalSize(A, &m, NULL));
805   nz = adj->nz;
806   PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
807   PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
808 
809   PetscCall(PetscMPIIntCast(nz, &mnz));
810   PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
811   PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
812   dispnz[0] = 0;
813   for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
814   if (adj->values) {
815     PetscCall(PetscMalloc1(NZ, &Values));
816     PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
817   }
818   PetscCall(PetscMalloc1(NZ, &J));
819   PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
820   PetscCall(PetscFree2(allnz, dispnz));
821   PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
822   nzstart -= nz;
823   /* shift the i[] values so they will be correct after being received */
824   for (i = 0; i < m; i++) adj->i[i] += nzstart;
825   PetscCall(PetscMalloc1(M + 1, &II));
826   PetscCall(PetscMPIIntCast(m, &mm));
827   PetscCall(PetscMalloc2(size, &allm, size, &dispm));
828   PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
829   dispm[0] = 0;
830   for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
831   PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
832   PetscCall(PetscFree2(allm, dispm));
833   II[M] = NZ;
834   /* shift the i[] values back */
835   for (i = 0; i < m; i++) adj->i[i] -= nzstart;
836   PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
837   PetscFunctionReturn(PETSC_SUCCESS);
838 }
839 
840 static PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
841 {
842   PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
843   PetscInt   *Values = NULL;
844   Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
845   PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;
846 
847   PetscFunctionBegin;
848   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
849   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
850   PetscCall(MatGetSize(A, &M, &N));
851   PetscCall(MatGetLocalSize(A, &m, NULL));
852   nz = adj->nz;
853   PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT, nz, adj->i[m]);
854   PetscCallMPI(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
855 
856   PetscCall(PetscMPIIntCast(nz, &mnz));
857   if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
858   PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
859   if (!rank) {
860     dispnz[0] = 0;
861     for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
862     if (adj->values) {
863       PetscCall(PetscMalloc1(NZ, &Values));
864       PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
865     }
866     PetscCall(PetscMalloc1(NZ, &J));
867     PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
868     PetscCall(PetscFree2(allnz, dispnz));
869   } else {
870     if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
871     PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
872   }
873   PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
874   nzstart -= nz;
875   /* shift the i[] values so they will be correct after being received */
876   for (i = 0; i < m; i++) adj->i[i] += nzstart;
877   PetscCall(PetscMPIIntCast(m, &mm));
878   if (!rank) {
879     PetscCall(PetscMalloc1(M + 1, &II));
880     PetscCall(PetscMalloc2(size, &allm, size, &dispm));
881     PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
882     dispm[0] = 0;
883     for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
884     PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
885     PetscCall(PetscFree2(allm, dispm));
886     II[M] = NZ;
887   } else {
888     PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
889     PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
890   }
891   /* shift the i[] values back */
892   for (i = 0; i < m; i++) adj->i[i] -= nzstart;
893   if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
894   PetscFunctionReturn(PETSC_SUCCESS);
895 }
896 
897 /*@
898   MatMPIAdjCreateNonemptySubcommMat - Creates the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows
899 
900   Collective
901 
902   Input Parameter:
903 . A - original `MATMPIADJ` matrix
904 
905   Output Parameter:
906 . B - matrix on subcommunicator, `NULL` on MPI processes that own zero rows of `A`
907 
908   Level: developer
909 
910   Note:
911   The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.
912 
913   Developer Note:
914   This function is mostly intended for internal use by mesh partitioning packages, such as ParMETIS, that require every process to own at least one row.
915 
916 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
917 @*/
918 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
919 {
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
922   PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
923   PetscFunctionReturn(PETSC_SUCCESS);
924 }
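/*
  Example usage (a minimal sketch, not taken from the PETSc test suite; `A` is assumed to be an
  assembled MATMPIADJ matrix in which some ranks may own zero rows):

    Mat B;

    PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &B));
    if (B) {
      // only ranks that own rows of A participate; run the partitioner on B here
      PetscCall(MatDestroy(&B)); // destroy B before A, since the two matrices share their arrays
    }
*/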
925 
926 /*MC
927   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
928   intended for use in constructing orderings and partitionings.
929 
930   Level: beginner
931 
932   Note:
933   You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
934   by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`
935 
936 .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
937 M*/
938 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
939 {
940   Mat_MPIAdj *b;
941 
942   PetscFunctionBegin;
943   PetscCall(PetscNew(&b));
944   B->data         = (void *)b;
945   B->ops[0]       = MatOps_Values;
946   B->assembled    = PETSC_FALSE;
947   B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */
948 
949   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
950   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
951   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
952   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
953   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
954   PetscFunctionReturn(PETSC_SUCCESS);
955 }
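/*
  Example of the MatSetValues() assembly path mentioned in the MATMPIADJ manual page above
  (a minimal sketch, assumed to run on exactly two MPI ranks; it assembles a directed 4-cycle,
  with each rank owning two rows):

    Mat         A;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
    PetscCall(MatSetSizes(A, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE));
    PetscCall(MatSetType(A, MATMPIADJ));
    for (PetscInt row = 2 * rank; row < 2 * rank + 2; row++) {
      PetscInt    col = (row + 1) % 4; // connect each vertex to the next one in the cycle
      PetscScalar one = 1.0;           // the value itself is not used to build the adjacency structure
      PetscCall(MatSetValues(A, 1, &row, 1, &col, &one, INSERT_VALUES));
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/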
956 
957 /*@
958   MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)
959 
960   Logically Collective
961 
962   Input Parameter:
963 . A - the matrix
964 
965   Output Parameter:
966 . B - the same matrix on all processes
967 
968   Level: intermediate
969 
970 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
971 @*/
972 PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
973 {
974   PetscFunctionBegin;
975   PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
976   PetscFunctionReturn(PETSC_SUCCESS);
977 }
978 
979 /*@
980   MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)
981 
982   Logically Collective
983 
984   Input Parameter:
985 . A - the matrix
986 
987   Output Parameter:
988 . B - the same matrix on rank zero, not set on other ranks
989 
990   Level: intermediate
991 
992   Note:
993   This routine is advantageous on systems with multiple MPI ranks per node because only one copy of the matrix
994   is stored on the first node, instead of one copy per rank. This allows much larger parallel graphs to be
995   partitioned sequentially.
996 
997 .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
998 @*/
999 PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1000 {
1001   PetscFunctionBegin;
1002   PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1003   PetscFunctionReturn(PETSC_SUCCESS);
1004 }
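/*
  Example usage (a minimal sketch; `A` is assumed to be an assembled parallel MATMPIADJ matrix and
  `rank` the result of MPI_Comm_rank() on its communicator):

    Mat B = NULL;

    PetscCall(MatMPIAdjToSeqRankZero(A, &B));
    if (rank == 0) {
      // B holds the complete graph on rank zero only; run a sequential partitioner on it here
      PetscCall(MatDestroy(&B));
    }
*/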
1005 
1006 /*@
1007   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1008 
1009   Logically Collective
1010 
1011   Input Parameters:
1012 + B      - the matrix
1013 . i      - the indices into `j` for the start of each row
1014 . j      - the column indices for each row (sorted for each row).
1015            The indices in `i` and `j` start with zero (NOT with one).
1016 - values - [use `NULL` if not provided] edge weights
1017 
1018   Level: intermediate
1019 
1020   Notes:
1021   The indices in `i` and `j` start with zero (NOT with one).
1022 
1023   You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1024   when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1025 
1026   You should not include the matrix diagonal elements.
1027 
1028   If you already have a matrix, you can create its adjacency matrix by a call
1029   to `MatConvert()`, specifying a type of `MATMPIADJ`.
1030 
1031   Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1032 
1033   Fortran Note:
1034   From Fortran the indices and values are copied so the array space need not be provided with `PetscMalloc()`.
1035 
1036 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
1037 @*/
1038 PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1039 {
1040   PetscFunctionBegin;
1041   PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1042   PetscFunctionReturn(PETSC_SUCCESS);
1043 }
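/*
  Example usage (a minimal sketch, assumed to run on exactly two MPI ranks, each owning two rows of
  a 4-vertex cycle graph; both ranks happen to pass identical arrays). The arrays are allocated
  with PetscMalloc1() and are not freed by the caller because the matrix takes ownership of them:

    Mat       B;
    PetscInt *ii, *jj;

    PetscCall(PetscMalloc1(3, &ii));
    PetscCall(PetscMalloc1(4, &jj));
    ii[0] = 0; ii[1] = 2; ii[2] = 4;            // row offsets
    jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2; // sorted column indices, no diagonal entries
    PetscCall(MatCreate(PETSC_COMM_WORLD, &B));
    PetscCall(MatSetSizes(B, 2, PETSC_DETERMINE, PETSC_DETERMINE, 4));
    PetscCall(MatSetType(B, MATMPIADJ));
    PetscCall(MatMPIAdjSetPreallocation(B, ii, jj, NULL));
*/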
1044 
1045 /*@C
1046   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1047   The matrix need not have numerical values associated with it; it is
1048   intended for ordering (to reduce bandwidth, etc.) and partitioning.
1049 
1050   Collective
1051 
1052   Input Parameters:
1053 + comm   - MPI communicator
1054 . m      - number of local rows
1055 . N      - number of global columns
1056 . i      - the indices into `j` for the start of each row
1057 . j      - the column indices for each row (sorted for each row).
1058 - values - the values, optional, use `NULL` if not provided
1059 
1060   Output Parameter:
1061 . A - the matrix
1062 
1063   Level: intermediate
1064 
1065   Notes:
1066   The indices in `i` and `j` start with zero (NOT with one).
1067 
1068   You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1069   when the matrix is destroyed; you must allocate them with `PetscMalloc()`.
1070 
1071   You should not include the matrix diagonals.
1072 
1073   If you already have a matrix, you can create its adjacency matrix by a call
1074   to `MatConvert()`, specifying a type of `MATMPIADJ`.
1075 
1076   Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`
1077 
1078   Fortran Note:
1079   From Fortran the arrays `i`, `j`, and `values` must be retained by the user until `A` is destroyed
1080 
1081 .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1082 @*/
1083 PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt i[], PetscInt j[], PetscInt values[], Mat *A) PeNSS
1084 {
1085   PetscFunctionBegin;
1086   PetscCall(MatCreate(comm, A));
1087   PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
1088   PetscCall(MatSetType(*A, MATMPIADJ));
1089   PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
1090   PetscFunctionReturn(PETSC_SUCCESS);
1091 }
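/*
  Example of obtaining the adjacency structure of an existing matrix, as described in the Notes
  above (a minimal sketch; `C` is assumed to be an assembled parallel matrix, e.g. a MATAIJ):

    Mat adj;

    PetscCall(MatConvert(C, MATMPIADJ, MAT_INITIAL_MATRIX, &adj));
    // adj now holds the off-diagonal connectivity of C, with |value| used as the edge weight
    PetscCall(MatDestroy(&adj));
*/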
1092