xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 6a09307c880f3cf726d2c2fd1cfca70c4fcbcfe1) !
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  /*
     Gather the rows of adj listed in irows (rows may be owned by other ranks), keep
     only the columns listed in icols, and return the result as raw CSR arrays:
       sadj_xadj   - row pointers, length = local size of irows + 1
       sadj_adjncy - surviving column indices (global numbering, compressed)
       sadj_values - edge weights; only produced when adj->useedgeweights is set
     Any output pointer may be NULL, in which case the corresponding array is freed
     here instead of returned (ownership otherwise passes to the caller).
     Two PetscSF rounds are used: the first fetches per-row lengths and row offsets
     from the owning ranks, the second fetches the actual column (and value) data.
  */
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the row's owning rank */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* per-local-row nonzero counts on the sending (owning) side */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  /* sf takes ownership of iremote (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch each requested row's length and its starting offset in the owner's adjncy */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv =0;
  /* total incoming entries and prefix-sum offsets for the received rows */
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* second SF: one leaf per incoming adjacency entry, rooted at the entry's location
     in the owner's adjncy array */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* mark entries whose column is not in icols with -1 and count the survivors;
     note PetscFindInt requires icols_indices to be sorted */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;   /* ncols_recv[i] becomes the compressed row length */
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compress the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand arrays to the caller, or discard when the caller passed NULL */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  /*
     Common driver for MatCreateSubMatrices (subcomm = PETSC_FALSE: each submatrix on
     PETSC_COMM_SELF) and MatCreateSubMatricesMPI (subcomm = PETSC_TRUE: each submatrix
     on the communicator of its row IS).  For each of the n requested (irow,icol) pairs
     it extracts the CSR data, renumbers columns into the local [0,nindx) numbering of
     the union of the requested rows and columns, and creates or refills submat[i].
  */
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  for (i=0; i<n; i++) {
    if (subcomm) {
      /* row and column index sets must share a (non-self) communicator */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* build the sorted, deduplicated union of row and column indices; submatrix
       columns are renumbered by position within this union */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* MAT_REUSE_MATRIX: copy the new data into the existing submatrix and free ours */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
206 
207 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
208 {
209   /*get sub-matrices across a sub communicator */
210   PetscFunctionBegin;
211   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
212   PetscFunctionReturn(0);
213 }
214 
215 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
216 {
217   PetscFunctionBegin;
218   /*get sub-matrices based on PETSC_COMM_SELF */
219   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
220   PetscFunctionReturn(0);
221 }
222 
223 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
224 {
225   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
226   PetscInt          i,j,m = A->rmap->n;
227   const char        *name;
228   PetscViewerFormat format;
229 
230   PetscFunctionBegin;
231   PetscCall(PetscObjectGetName((PetscObject)A,&name));
232   PetscCall(PetscViewerGetFormat(viewer,&format));
233   if (format == PETSC_VIEWER_ASCII_INFO) {
234     PetscFunctionReturn(0);
235   } else {
236     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
237     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
238     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
239     for (i=0; i<m; i++) {
240       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
241       for (j=a->i[i]; j<a->i[i+1]; j++) {
242         if (a->values) {
243           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
244         } else {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
246         }
247       }
248       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
249     }
250     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
251     PetscCall(PetscViewerFlush(viewer));
252     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
258 {
259   PetscBool      iascii;
260 
261   PetscFunctionBegin;
262   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
263   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
264   PetscFunctionReturn(0);
265 }
266 
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  /* Release all storage owned by the MATMPIADJ implementation and detach the
     composed functions registered at creation time. */
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* i/j were malloc()ed by an external package (e.g. a partitioner), so they
         must be released with free(), not PetscFree().
         NOTE(review): a->values is not freed on this branch — presumably external
         callers never supply values here; confirm against the setters. */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
  PetscFunctionReturn(0);
}
294 
295 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
296 {
297   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
298 
299   PetscFunctionBegin;
300   switch (op) {
301   case MAT_SYMMETRIC:
302   case MAT_STRUCTURALLY_SYMMETRIC:
303   case MAT_HERMITIAN:
304     a->symmetric = flg;
305     break;
306   case MAT_SYMMETRY_ETERNAL:
307     break;
308   default:
309     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
310     break;
311   }
312   PetscFunctionReturn(0);
313 }
314 
315 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
316 {
317   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
318 
319   PetscFunctionBegin;
320   row -= A->rmap->rstart;
321   PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
322   *nz = a->i[row+1] - a->i[row];
323   if (v) {
324     PetscInt j;
325     if (a->rowvalues_alloc < *nz) {
326       PetscCall(PetscFree(a->rowvalues));
327       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
328       PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
329     }
330     for (j=0; j<*nz; j++) {
331       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
332     }
333     *v = (*nz) ? a->rowvalues : NULL;
334   }
335   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
336   PetscFunctionReturn(0);
337 }
338 
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  /* Nothing to release: MatGetRow_MPIAdj() hands out pointers into the matrix's
     own storage (a->j) and a reusable scratch buffer (a->rowvalues), so restoring
     a row is a no-op. */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
344 
345 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
346 {
347   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
348   PetscBool      flag;
349 
350   PetscFunctionBegin;
351   /* If the  matrix dimensions are not equal,or no of nonzeros */
352   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
353     flag = PETSC_FALSE;
354   }
355 
356   /* if the a->i are the same */
357   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
358 
359   /* if a->j are the same */
360   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
361 
362   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
363   PetscFunctionReturn(0);
364 }
365 
366 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
367 {
368   PetscInt   i;
369   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
370   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
371 
372   PetscFunctionBegin;
373   *m    = A->rmap->n;
374   *ia   = a->i;
375   *ja   = a->j;
376   *done = PETSC_TRUE;
377   if (oshift) {
378     for (i=0; i<(*ia)[*m]; i++) {
379       (*ja)[i]++;
380     }
381     for (i=0; i<=(*m); i++) (*ia)[i]++;
382   }
383   PetscFunctionReturn(0);
384 }
385 
386 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
387 {
388   PetscInt   i;
389   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
390   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
391 
392   PetscFunctionBegin;
393   PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
394   PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
395   if (oshift) {
396     PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
397     PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
398     for (i=0; i<=(*m); i++) (*ia)[i]--;
399     for (i=0; i<(*ia)[*m]; i++) {
400       (*ja)[i]--;
401     }
402   }
403   PetscFunctionReturn(0);
404 }
405 
406 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
407 {
408   Mat               B;
409   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
410   const PetscInt    *rj;
411   const PetscScalar *ra;
412   MPI_Comm          comm;
413 
414   PetscFunctionBegin;
415   PetscCall(MatGetSize(A,NULL,&N));
416   PetscCall(MatGetLocalSize(A,&m,NULL));
417   PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
418 
419   /* count the number of nonzeros per row */
420   for (i=0; i<m; i++) {
421     PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
422     for (j=0; j<len; j++) {
423       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
424     }
425     nzeros += len;
426     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
427   }
428 
429   /* malloc space for nonzeros */
430   PetscCall(PetscMalloc1(nzeros+1,&a));
431   PetscCall(PetscMalloc1(N+1,&ia));
432   PetscCall(PetscMalloc1(nzeros+1,&ja));
433 
434   nzeros = 0;
435   ia[0]  = 0;
436   for (i=0; i<m; i++) {
437     PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
438     cnt  = 0;
439     for (j=0; j<len; j++) {
440       if (rj[j] != i+rstart) { /* if not diagonal */
441         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
442         ja[nzeros+cnt++] = rj[j];
443       }
444     }
445     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
446     nzeros += cnt;
447     ia[i+1] = nzeros;
448   }
449 
450   PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
451   PetscCall(MatCreate(comm,&B));
452   PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
453   PetscCall(MatSetType(B,type));
454   PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));
455 
456   if (reuse == MAT_INPLACE_MATRIX) {
457     PetscCall(MatHeaderReplace(A,&B));
458   } else {
459     *newmat = B;
460   }
461   PetscFunctionReturn(0);
462 }
463 
PetscErrorCode MatSetValues_MPIAdj(Mat A,PetscInt m,const PetscInt *rows,PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode im)
{
  /*
     Record the nonzero PATTERN of the given entries: locally-owned (row,col)
     pairs go into a hash set of index pairs, off-process rows are stashed for
     communication during assembly.  The actual numerical values are ignored;
     negative indices and diagonal entries are skipped.  Only valid before the
     matrix is assembled (adj->i built).
  */
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
  PetscInt   rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazily create the pattern hash set and the stash on the first insertion */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject) A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;                /* negative row index: skip (standard MatSetValues convention) */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* off-process row: stash the whole row slice for delivery at assembly time */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;  /* skip negative columns and diagonal entries */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}
496 
497 PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
498 {
499   PetscInt   nstash, reallocs;
500   Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
501 
502   PetscFunctionBegin;
503   if (!adj->ht) {
504     PetscCall(PetscHSetIJCreate(&adj->ht));
505     PetscCall(PetscLayoutSetUp(A->rmap));
506     PetscCall(PetscLayoutSetUp(A->cmap));
507   }
508   PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
509   PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
510   PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
511   PetscFunctionReturn(0);
512 }
513 
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  /*
     Finish assembly: drain the stash of off-process entries into the local hash
     set, then convert the hash set into the final CSR arrays (adj->i, adj->j)
     with each row's column indices sorted.
  */
  PetscScalar    *val;
  PetscInt       *row, *col,m,rstart,*rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* receive stashed entries and insert them through MatSetValues_MPIAdj */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
     if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row+i, ncols, col+i, val+i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* count entries per local row, then prefix-sum into row pointers */
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
  PetscCall(PetscCalloc1(m+1,&rowstarts));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht,hi);
  }
  for (i=1; i<m+1; i++) {
    rowstarts[i] = rowstarts[i-1] + rowstarts[i];
  }

  /* scatter the column indices into place; this advances rowstarts[r] by one
     per entry, so afterwards rowstarts holds each row's END offset */
  PetscCall(PetscHSetIJGetSize(ht,&nz));
  PetscCall(PetscMalloc1(nz,&col));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht,hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* rotate the offsets back down so rowstarts[r] is again row r's START offset */
  for (i=m; i>0; i--) {
    rowstarts[i] = rowstarts[i-1];
  }
  rowstarts[0] = 0;

  /* each row's column indices must be sorted for the CSR consumers */
  for (PetscInt i=0; i<m; i++) {
    PetscCall(PetscSortInt(rowstarts[i+1]-rowstarts[i],&col[rowstarts[i]]));
  }

  adj->i       = rowstarts;
  adj->j       = col;
  adj->freeaij = PETSC_TRUE;  /* destructor must free the arrays built here */
  PetscFunctionReturn(0);
}
581 
582 /* -------------------------------------------------------------------*/
/* Function dispatch table for MATMPIADJ; the numeric comments label slot indices
   in struct _MatOps.  Unsupported operations are left NULL. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
734 
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  /*
     Install caller-provided CSR arrays (row pointers i, column indices j, and
     optional edge weights values) as the matrix storage.  B takes ownership
     (b->freeaij) and is assembled immediately; assembly callbacks are disabled
     since the data arrives fully formed.
  */
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make everybody knows if they are using edge weights or not */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* sanity-check the CSR structure: i monotone starting at 0, j within the column range */
    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    /* NOTE(review): loop stops at n-1, so i[n] (the total nonzero count used below)
       is never checked against i[n-1] — confirm whether that is intentional */
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  /* data is already in final form, so skip the hash/stash assembly path */
  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(0);
}
774 
775 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
776 {
777   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
778   const PetscInt *ranges;
779   MPI_Comm       acomm,bcomm;
780   MPI_Group      agroup,bgroup;
781   PetscMPIInt    i,rank,size,nranks,*ranks;
782 
783   PetscFunctionBegin;
784   *B    = NULL;
785   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
786   PetscCallMPI(MPI_Comm_size(acomm,&size));
787   PetscCallMPI(MPI_Comm_size(acomm,&rank));
788   PetscCall(MatGetOwnershipRanges(A,&ranges));
789   for (i=0,nranks=0; i<size; i++) {
790     if (ranges[i+1] - ranges[i] > 0) nranks++;
791   }
792   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
793     PetscCall(PetscObjectReference((PetscObject)A));
794     *B   = A;
795     PetscFunctionReturn(0);
796   }
797 
798   PetscCall(PetscMalloc1(nranks,&ranks));
799   for (i=0,nranks=0; i<size; i++) {
800     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
801   }
802   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
803   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
804   PetscCall(PetscFree(ranks));
805   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
806   PetscCallMPI(MPI_Group_free(&agroup));
807   PetscCallMPI(MPI_Group_free(&bgroup));
808   if (bcomm != MPI_COMM_NULL) {
809     PetscInt   m,N;
810     Mat_MPIAdj *b;
811     PetscCall(MatGetLocalSize(A,&m,NULL));
812     PetscCall(MatGetSize(A,NULL,&N));
813     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
814     b          = (Mat_MPIAdj*)(*B)->data;
815     b->freeaij = PETSC_FALSE;
816     PetscCallMPI(MPI_Comm_free(&bcomm));
817   }
818   PetscFunctionReturn(0);
819 }
820 
/*
   MatMPIAdjToSeq_MPIAdj - gathers the distributed adjacency structure onto every rank,
   producing a new matrix B on PETSC_COMM_SELF that holds the complete global graph.

   Collective: every rank participates in the Allgather/Allgatherv calls and every rank
   receives the full i/j/values arrays. The gathered arrays (II, J, Values) are handed to
   MatCreateMPIAdj(), which takes ownership of them.
 */
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = total number of nonzeros across all ranks */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* gather each rank's nonzero count and build displacements for the Allgatherv calls */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
  if (adj->values) {
    /* edge weights are gathered only when present locally; the earlier preallocation
       Allreduce makes this consistent across ranks — NOTE(review): confirm */
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  /* exclusive prefix sum: nzstart = number of nonzeros on all lower ranks */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  /* close the gathered row-offset array with the global nonzero count */
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
864 
865 /*@
866    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
867 
868    Collective
869 
870    Input Parameter:
871 .  A - original MPIAdj matrix
872 
873    Output Parameter:
874 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
875 
876    Level: developer
877 
878    Note:
879    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
880 
881    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
882 
883 .seealso: `MatCreateMPIAdj()`
884 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A; errors if A's type does not provide one */
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
892 
893 /*MC
894    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
895    intended for use constructing orderings and partitionings.
896 
897   Level: beginner
898 
899   Notes:
900     You can provide values to the matrix using MatMPIAdjSetPreallocation(), MatCreateMPIAdj(), or
901     by calling MatSetValues() and MatAssemblyBegin() followed by MatAssemblyEnd()
902 
903 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
904 M*/
905 
/* MATMPIADJ constructor: allocates the type-specific data, installs the operation table,
   and composes the type-specific methods dispatched by the public wrappers in this file. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  /* copy the static MatOps_Values table defined earlier in this file */
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
923 
924 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)
926 
927    Logically Collective
928 
929    Input Parameter:
930 .  A - the matrix
931 
932    Output Parameter:
933 .  B - the same matrix on all processes
934 
935    Level: intermediate
936 
937 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
938 @*/
939 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
940 {
941   PetscFunctionBegin;
942   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
943   PetscFunctionReturn(0);
944 }
945 
946 /*@C
947    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
948 
949    Logically Collective
950 
951    Input Parameters:
952 +  A - the matrix
953 .  i - the indices into j for the start of each row
954 .  j - the column indices for each row (sorted for each row).
955        The indices in i and j start with zero (NOT with one).
956 -  values - [optional] edge weights
957 
958    Level: intermediate
959 
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
961 @*/
962 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
963 {
964   PetscFunctionBegin;
965   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
966   PetscFunctionReturn(0);
967 }
968 
969 /*@C
970    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
971    The matrix does not have numerical values associated with it, but is
972    intended for ordering (to reduce bandwidth etc) and partitioning.
973 
974    Collective
975 
976    Input Parameters:
977 +  comm - MPI communicator
978 .  m - number of local rows
979 .  N - number of global columns
980 .  i - the indices into j for the start of each row
981 .  j - the column indices for each row (sorted for each row).
982        The indices in i and j start with zero (NOT with one).
983 -  values -[optional] edge weights
984 
985    Output Parameter:
986 .  A - the matrix
987 
988    Level: intermediate
989 
990    Notes:
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
992    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
993    call from Fortran you need not create the arrays with PetscMalloc().
994 
995    You should not include the matrix diagonals.
996 
997    If you already have a matrix, you can create its adjacency matrix by a call
998    to MatConvert(), specifying a type of MATMPIADJ.
999 
1000    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
1001 
1002 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1003 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  /* local rows are given; global rows and local columns are derived by PETSc */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  /* hands ownership of i, j, values to the matrix (see the manual page Notes above) */
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
1013