xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 0dc45c67dd8cb6796abcc61ff7db4f0ecf01d6ed)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
/*
 * Gather the CSR data (xadj/adjncy and, when present, edge weights) of the
 * sub-matrix of adj defined by the rows of irows and the columns of icols.
 * The interface should be easy to use for both MatCreateSubMatrix (parallel
 * sub-matrix) and MatCreateSubMatrices (sequential sub-matrices).
 *
 * The output arrays are allocated here and ownership passes to the caller;
 * any of sadj_xadj/sadj_adjncy/sadj_values may be NULL if that array is not
 * wanted.  *sadj_values is set only when the matrix carries edge weights.
 */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the rank owning that row */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* row lengths of the local rows this rank may have to serve to others */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch, for every requested row, its length and its offset into the owner's adjncy[] */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* second star forest: one leaf per incoming column entry, rooted at the owner's adjncy slot */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* edge weights travel alongside the column indices when the matrix has them */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* drop entries whose column is not in icols: mark them -1 and shrink the row count
     (PetscFindInt does a binary search, so icols is assumed sorted) */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compress the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand the arrays to the caller, or free them if they were not requested */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
 * Shared driver for MatCreateSubMatrices (subcomm == PETSC_FALSE: sequential
 * sub-matrices on PETSC_COMM_SELF) and MatCreateSubMatricesMPI (subcomm ==
 * PETSC_TRUE: parallel sub-matrices on the communicator of each index set).
 *
 * For each requested sub-matrix, the sub-CSR data are gathered, columns are
 * renumbered into the local ordering of the (sorted, de-duplicated) union of
 * row and column indices, and the result is either wrapped in a new MATMPIADJ
 * (MAT_INITIAL_MATRIX, which takes ownership of the arrays) or copied into an
 * existing one (MAT_REUSE_MATRIX, after which the arrays are freed here).
 */
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i=0; i<n; i++) {
    if (subcomm) {
      /* row and column IS must share a communicator, and it must be truly parallel */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* union of row and column indices defines the local numbering of the sub-matrix */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* MAT_REUSE_MATRIX: copy into the existing matrix and release our arrays */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
208 
/* MATMPIADJ implementation of MatCreateSubMatricesMPI(): each sub-matrix is
   parallel over the communicator of its index sets (subcomm = PETSC_TRUE). */
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  /*get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
  PetscFunctionReturn(0);
}
216 
/* MATMPIADJ implementation of MatCreateSubMatrices(): each sub-matrix is
   sequential on PETSC_COMM_SELF (subcomm = PETSC_FALSE). */
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
  PetscFunctionReturn(0);
}
224 
225 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
226 {
227   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
228   PetscInt          i,j,m = A->rmap->n;
229   const char        *name;
230   PetscViewerFormat format;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscObjectGetName((PetscObject)A,&name));
234   PetscCall(PetscViewerGetFormat(viewer,&format));
235   if (format == PETSC_VIEWER_ASCII_INFO) {
236     PetscFunctionReturn(0);
237   } else {
238     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
239     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
240     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241     for (i=0; i<m; i++) {
242       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
243       for (j=a->i[i]; j<a->i[i+1]; j++) {
244         if (a->values) {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
246         } else {
247           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
248         }
249       }
250       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
251     }
252     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
253     PetscCall(PetscViewerFlush(viewer));
254     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255   }
256   PetscFunctionReturn(0);
257 }
258 
259 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
260 {
261   PetscBool      iascii;
262 
263   PetscFunctionBegin;
264   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
265   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
266   PetscFunctionReturn(0);
267 }
268 
269 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
270 {
271   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
272 
273   PetscFunctionBegin;
274 #if defined(PETSC_USE_LOG)
275   PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
276 #endif
277   PetscCall(PetscFree(a->diag));
278   if (a->freeaij) {
279     if (a->freeaijwithfree) {
280       if (a->i) free(a->i);
281       if (a->j) free(a->j);
282     } else {
283       PetscCall(PetscFree(a->i));
284       PetscCall(PetscFree(a->j));
285       PetscCall(PetscFree(a->values));
286     }
287   }
288   PetscCall(PetscFree(a->rowvalues));
289   PetscCall(PetscFree(mat->data));
290   PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
292   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
293   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
294   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeqRankZero_C",NULL));
295   PetscFunctionReturn(0);
296 }
297 
298 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
299 {
300   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
301 
302   PetscFunctionBegin;
303   switch (op) {
304   case MAT_SYMMETRIC:
305   case MAT_STRUCTURALLY_SYMMETRIC:
306   case MAT_HERMITIAN:
307     a->symmetric = flg;
308     break;
309   case MAT_SYMMETRY_ETERNAL:
310     break;
311   default:
312     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
313     break;
314   }
315   PetscFunctionReturn(0);
316 }
317 
318 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
319 {
320   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
321 
322   PetscFunctionBegin;
323   row -= A->rmap->rstart;
324   PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
325   *nz = a->i[row+1] - a->i[row];
326   if (v) {
327     PetscInt j;
328     if (a->rowvalues_alloc < *nz) {
329       PetscCall(PetscFree(a->rowvalues));
330       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
331       PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
332     }
333     for (j=0; j<*nz; j++) {
334       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
335     }
336     *v = (*nz) ? a->rowvalues : NULL;
337   }
338   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
339   PetscFunctionReturn(0);
340 }
341 
/* Companion to MatGetRow_MPIAdj: nothing to release, since MatGetRow hands out
   pointers into the internal CSR arrays and a persistent scratch buffer. */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
347 
348 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
349 {
350   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
351   PetscBool      flag;
352 
353   PetscFunctionBegin;
354   /* If the  matrix dimensions are not equal,or no of nonzeros */
355   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
356     flag = PETSC_FALSE;
357   }
358 
359   /* if the a->i are the same */
360   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
361 
362   /* if a->j are the same */
363   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
364 
365   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
366   PetscFunctionReturn(0);
367 }
368 
/* Expose the internal CSR arrays directly (no copy). When oshift is nonzero
   the arrays are shifted to 1-based indexing IN PLACE, so the caller must
   balance this with MatRestoreRowIJ_MPIAdj to undo the shift. The symmetric
   and blockcompressed arguments are ignored for this type. */
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: ja must be shifted first, while (*ia)[*m] still holds the
       unshifted nonzero count used as the loop bound */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
388 
/* Undo MatGetRowIJ_MPIAdj: verify the caller passed back the same internal
   arrays, and reverse the in-place 1-based shift when oshift is nonzero. */
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    /* reverse order of MatGetRowIJ: ia first, so (*ia)[*m] is back to the
       unshifted nonzero count before it is used as the ja loop bound */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
408 
/* Convert a matrix of any type to MATMPIADJ (or another adjacency-style type
   given by `type`): diagonal entries are dropped, and |value| rounded to
   PetscInt becomes the edge weight. With MAT_INPLACE_MATRIX the converted
   matrix replaces A; otherwise it is returned in *newmat. */
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
  }

  /* malloc space for nonzeros */
  /* NOTE(review): ia is indexed only up to ia[m]; N+1 over-allocates whenever
     the global column count N exceeds the local row count m — confirm intent */
  PetscCall(PetscMalloc1(nzeros+1,&a));
  PetscCall(PetscMalloc1(N+1,&ia));
  PetscCall(PetscMalloc1(nzeros+1,&ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        /* edge weight = |value| truncated to an integer */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCall(MatCreate(comm,&B));
  PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(B,type));
  /* B takes ownership of ia, ja, and a */
  PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
466 
/* Insert entries into a not-yet-assembled MATMPIADJ. Only the sparsity pattern
   is recorded (numerical values and InsertMode are ignored): locally owned
   entries go into a hash set that de-duplicates them, off-rank rows are
   stashed for communication during assembly. Negative indices and diagonal
   entries are skipped. Fails if the CSR arrays already exist. */
PetscErrorCode MatSetValues_MPIAdj(Mat A,PetscInt m,const PetscInt *rows,PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
  PetscInt   rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    /* first call: create the structure hash set and the off-rank stash */
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject) A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;                   /* negative row index: skip */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* row owned by another rank: stash the whole row for assembly-time send */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue; /* skip negative and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}
499 
/* Begin assembly: start communicating stashed off-rank rows to their owners.
   Creates the hash set/layouts if MatSetValues was never called locally. */
PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt   nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    /* no local MatSetValues calls yet; still need the structures for assembly */
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(0);
}
516 
/* Finish assembly: drain the stash (routing off-rank entries back through
   MatSetValues), then convert the hash set of (row,col) keys into sorted CSR
   arrays (adj->i / adj->j) owned by the matrix. */
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar    *val;
  PetscInt       *row, *col,m,rstart,*rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* receive stashed entries from other ranks and insert them locally */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
     if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row+i, ncols, col+i, val+i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* first pass over the hash set: count entries per local row, then prefix-sum
     so rowstarts[i] is the insertion cursor for row i */
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
  PetscCall(PetscCalloc1(m+1,&rowstarts));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht,hi);
  }
  for (i=1; i<m+1; i++) {
    rowstarts[i] = rowstarts[i-1] + rowstarts[i];
  }

  /* second pass: bucket the column indices; this advances each cursor by the
     row's count, so rowstarts is shifted one slot left afterwards */
  PetscCall(PetscHSetIJGetSize(ht,&nz));
  PetscCall(PetscMalloc1(nz,&col));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht,hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* shift the cursors back down one slot to restore the CSR row pointers */
  for (i=m; i>0; i--) {
    rowstarts[i] = rowstarts[i-1];
  }
  rowstarts[0] = 0;

  /* hash iteration order is arbitrary; sort each row's column indices */
  for (PetscInt i=0; i<m; i++) {
    PetscCall(PetscSortInt(rowstarts[i+1]-rowstarts[i],&col[rowstarts[i]]));
  }

  /* the matrix now owns the freshly built CSR arrays */
  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(0);
}
585 
586 /* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ. Slot order follows struct _MatOps (see the
   numbered markers); unsupported operations are NULL. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
738 
/* Install caller-provided CSR arrays (i, j, optional integer edge weights
   `values`) into a MATMPIADJ. The matrix takes ownership of the arrays
   (freeaij is set). Edge-weight usage is agreed upon collectively, and the
   arrays are sanity-checked in debug builds. Assembly hooks are disabled
   since the structure is final once installed. */
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* make sure every rank agrees on whether edge weights are in use (MAX: any rank with weights wins) */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    /* NOTE(review): this validates i[1..n-1] only; i[n] (the total nonzero
       count read below) is never checked against i[n-1] — confirm intent */
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  /* the matrix takes ownership of the caller's arrays */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  /* structure is fixed now; bypass the hash-based assembly path */
  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(0);
}
778 
779 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
780 {
781   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
782   const PetscInt *ranges;
783   MPI_Comm       acomm,bcomm;
784   MPI_Group      agroup,bgroup;
785   PetscMPIInt    i,rank,size,nranks,*ranks;
786 
787   PetscFunctionBegin;
788   *B    = NULL;
789   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
790   PetscCallMPI(MPI_Comm_size(acomm,&size));
791   PetscCallMPI(MPI_Comm_size(acomm,&rank));
792   PetscCall(MatGetOwnershipRanges(A,&ranges));
793   for (i=0,nranks=0; i<size; i++) {
794     if (ranges[i+1] - ranges[i] > 0) nranks++;
795   }
796   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
797     PetscCall(PetscObjectReference((PetscObject)A));
798     *B   = A;
799     PetscFunctionReturn(0);
800   }
801 
802   PetscCall(PetscMalloc1(nranks,&ranks));
803   for (i=0,nranks=0; i<size; i++) {
804     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
805   }
806   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
807   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
808   PetscCall(PetscFree(ranks));
809   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
810   PetscCallMPI(MPI_Group_free(&agroup));
811   PetscCallMPI(MPI_Group_free(&bgroup));
812   if (bcomm != MPI_COMM_NULL) {
813     PetscInt   m,N;
814     Mat_MPIAdj *b;
815     PetscCall(MatGetLocalSize(A,&m,NULL));
816     PetscCall(MatGetSize(A,NULL,&N));
817     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
818     b          = (Mat_MPIAdj*)(*B)->data;
819     b->freeaij = PETSC_FALSE;
820     PetscCallMPI(MPI_Comm_free(&bcomm));
821   }
822   PetscFunctionReturn(0);
823 }
824 
825 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
826 {
827   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
828   PetscInt       *Values = NULL;
829   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
830   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
831 
832   PetscFunctionBegin;
833   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
834   PetscCall(MatGetSize(A,&M,&N));
835   PetscCall(MatGetLocalSize(A,&m,NULL));
836   nz   = adj->nz;
837   PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
838   PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
839 
840   PetscCall(PetscMPIIntCast(nz,&mnz));
841   PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
842   PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
843   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
844   if (adj->values) {
845     PetscCall(PetscMalloc1(NZ,&Values));
846     PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
847   }
848   PetscCall(PetscMalloc1(NZ,&J));
849   PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
850   PetscCall(PetscFree2(allnz,dispnz));
851   PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
852   nzstart -= nz;
853   /* shift the i[] values so they will be correct after being received */
854   for (i=0; i<m; i++) adj->i[i] += nzstart;
855   PetscCall(PetscMalloc1(M+1,&II));
856   PetscCall(PetscMPIIntCast(m,&mm));
857   PetscCall(PetscMalloc2(size,&allm,size,&dispm));
858   PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
859   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
860   PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
861   PetscCall(PetscFree2(allm,dispm));
862   II[M] = NZ;
863   /* shift the i[] values back */
864   for (i=0; i<m; i++) adj->i[i] -= nzstart;
865   PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
866   PetscFunctionReturn(0);
867 }
868 
869 PetscErrorCode  MatMPIAdjToSeqRankZero_MPIAdj(Mat A,Mat *B)
870 {
871   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
872   PetscInt       *Values = NULL;
873   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
874   PetscMPIInt    mnz,mm,*allnz = NULL,*allm,size,*dispnz,*dispm,rank;
875 
876   PetscFunctionBegin;
877   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
878   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank));
879   PetscCall(MatGetSize(A,&M,&N));
880   PetscCall(MatGetLocalSize(A,&m,NULL));
881   nz   = adj->nz;
882   PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
883   PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
884 
885   PetscCall(PetscMPIIntCast(nz,&mnz));
886   if (!rank) PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
887   PetscCallMPI(MPI_Gather(&mnz,1,MPI_INT,allnz,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
888   if (!rank) {
889     dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
890     if (adj->values) {
891       PetscCall(PetscMalloc1(NZ,&Values));
892       PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
893     }
894     PetscCall(PetscMalloc1(NZ,&J));
895     PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
896     PetscCall(PetscFree2(allnz,dispnz));
897   } else {
898     if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
899     PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
900   }
901   PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
902   nzstart -= nz;
903   /* shift the i[] values so they will be correct after being received */
904   for (i=0; i<m; i++) adj->i[i] += nzstart;
905   PetscCall(PetscMPIIntCast(m,&mm));
906   if (!rank) {
907     PetscCall(PetscMalloc1(M+1,&II));
908     PetscCall(PetscMalloc2(size,&allm,size,&dispm));
909     PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,allm,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
910     dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
911     PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
912     PetscCall(PetscFree2(allm,dispm));
913     II[M] = NZ;
914   } else {
915     PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,NULL,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
916     PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
917   }
918    /* shift the i[] values back */
919   for (i=0; i<m; i++) adj->i[i] -= nzstart;
920   if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
921   PetscFunctionReturn(0);
922 }
923 
924 /*@
925    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
926 
927    Collective
928 
929    Input Parameter:
930 .  A - original MPIAdj matrix
931 
932    Output Parameter:
933 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
934 
935    Level: developer
936 
937    Note:
938    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
939 
940    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
941 
942 .seealso: `MatCreateMPIAdj()`
943 @*/
944 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
945 {
946   PetscFunctionBegin;
947   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
948   PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
949   PetscFunctionReturn(0);
950 }
951 
952 /*MC
953    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
954    intended for use constructing orderings and partitionings.
955 
956   Level: beginner
957 
958   Notes:
959     You can provide values to the matrix using MatMPIAdjSetPreallocation(), MatCreateMPIAdj(), or
960     by calling MatSetValues() and MatAssemblyBegin() followed by MatAssemblyEnd()
961 
962 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
963 M*/
964 
/*
   Constructor registered for MATMPIADJ: allocates the type-specific data structure,
   installs the operations table, and composes the type-specific methods that the public
   MatMPIAdj* wrappers dispatch to via PetscUseMethod()/PetscTryMethod().
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  /* Copy the file-scope operations table into this instance */
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeqRankZero_C",MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
983 
984 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential partitioners)
986 
987    Logically Collective
988 
989    Input Parameter:
990 .  A - the matrix
991 
992    Output Parameter:
993 .  B - the same matrix on all processes
994 
995    Level: intermediate
996 
997 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
998 @*/
999 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
1000 {
1001   PetscFunctionBegin;
1002   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
1003   PetscFunctionReturn(0);
1004 }
1005 
1006 /*@C
   MatMPIAdjToSeqRankZero - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on rank zero (needed by sequential partitioners)
1008 
1009    Logically Collective
1010 
1011    Input Parameter:
1012 .  A - the matrix
1013 
1014    Output Parameter:
1015 .  B - the same matrix on rank zero, not set on other ranks
1016 
1017    Level: intermediate
1018 
1019    Notes:
     Compared to MatMPIAdjToSeq(), this routine has the advantage on systems with multiple ranks per
     node that only one copy of the matrix is stored on the first node, instead of one copy per rank.
     This can allow much larger parallel graphs to be partitioned sequentially.
1023 
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1025 @*/
1026 PetscErrorCode  MatMPIAdjToSeqRankZero(Mat A,Mat *B)
1027 {
1028   PetscFunctionBegin;
1029   PetscUseMethod(A,"MatMPIAdjToSeqRankZero_C",(Mat,Mat*),(A,B));
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 /*@C
1034    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1035 
1036    Logically Collective
1037 
1038    Input Parameters:
1039 +  A - the matrix
1040 .  i - the indices into j for the start of each row
1041 .  j - the column indices for each row (sorted for each row).
1042        The indices in i and j start with zero (NOT with one).
1043 -  values - [optional] edge weights
1044 
1045    Level: intermediate
1046 
1047 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1048 @*/
1049 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
1050 {
1051   PetscFunctionBegin;
1052   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 /*@C
1057    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1058    The matrix does not have numerical values associated with it, but is
1059    intended for ordering (to reduce bandwidth etc) and partitioning.
1060 
1061    Collective
1062 
1063    Input Parameters:
1064 +  comm - MPI communicator
1065 .  m - number of local rows
1066 .  N - number of global columns
1067 .  i - the indices into j for the start of each row
1068 .  j - the column indices for each row (sorted for each row).
1069        The indices in i and j start with zero (NOT with one).
1070 -  values -[optional] edge weights
1071 
1072    Output Parameter:
1073 .  A - the matrix
1074 
1075    Level: intermediate
1076 
1077    Notes:
1078    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
1079    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
1080    call from Fortran you need not create the arrays with PetscMalloc().
1081 
1082    You should not include the matrix diagonals.
1083 
1084    If you already have a matrix, you can create its adjacency matrix by a call
1085    to MatConvert(), specifying a type of MATMPIADJ.
1086 
1087    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
1088 
1089 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1090 @*/
/*
   Convenience constructor: create + size + set type + preallocate in one call.
   Ownership of i, j, and values passes to the matrix (see the manual page above);
   the column layout is determined globally from N, the row layout from the local m.
*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
1100