xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision fdf6c4e30aafdbc795e4f76379caa977fd5cdf5a)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
   Gathers the rows of `adj` selected by `irows`, restricted to the columns in `icols`,
   into local CSR arrays.  Used by both MatCreateSubMatrix (parallel) and
   MatCreateSubMatrices (sequential) paths.

   Output (each optional; pass NULL to discard):
     sadj_xadj   - row pointer array of length nlrows_is+1 (PetscCalloc'd; caller frees)
     sadj_adjncy - column indices of the kept entries (PetscCalloc'd; caller frees)
     sadj_values - edge weights, only when adj uses edge weights, else NULL

   Communication is done in two PetscSF rounds: first the per-row lengths and row
   offsets are fetched from the owning ranks, then the column indices (and weights)
   themselves.
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* Build an SF graph with one leaf per requested row, rooted at the rank that
     owns that global row in adj's row layout */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* per-local-row nonzero counts that the owners will serve to requesters */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* Round 1: fetch each requested row's length and its offset (xadj entry) on the owner */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received lengths into offsets for the incoming column data */
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* Build a second SF with one leaf per incoming nonzero, rooted at the owner's
     position xadj_recv[i]+j inside its adjncy array */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* values_recv is only allocated/used when edge weights are present */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* Round 2: fetch the actual column indices (and edge weights, if any) */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* Drop entries whose column is not in icols: mark them -1 and shrink the row
     count; rnclos counts the surviving entries.  NOTE: PetscFindInt is a binary
     search, so icols is assumed sorted here — TODO confirm callers guarantee that. */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* Compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* Hand ownership of each array to the caller, or free it if not wanted */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
   Shared implementation behind MatCreateSubMatrices_MPIAdj (subcomm = PETSC_FALSE,
   sequential sub-matrices on PETSC_COMM_SELF) and MatCreateSubMatricesMPI_MPIAdj
   (subcomm = PETSC_TRUE, sub-matrices on the communicator of the index sets).

   For each of the n (irow, icol) pairs: gathers the sub-matrix CSR data, renumbers
   the column indices into the local numbering defined by the sorted union of the
   row and column indices, then either creates a new MATMPIADJ (MAT_INITIAL_MATRIX,
   which takes ownership of the CSR arrays) or copies the data into the existing
   matrix (reuse path, which frees the temporaries).

   NOTE(review): on the MAT_INITIAL_MATRIX path submat[] is assumed to be already
   allocated by the MatCreateSubMatrices dispatch layer — confirm against callers.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory: the scratch `indices` array
   * must hold the concatenation of any one pair's row and column indices
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i=0; i<n; i++) {
    if (subcomm) {
      /* Parallel case: the row and column IS must share a communicator, and it
         cannot be PETSC_COMM_SELF */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data (ownership of the three arrays is transferred to us) */
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    /* Build the sorted, dedup'd union of row and column indices: the position of a
       global column in this array is its new (local) column number */
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns from global to sub-matrix-local numbering */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* Reuse path: copy into the existing matrix's storage and free the temporaries */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
208 
209 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
210 {
211   /*get sub-matrices across a sub communicator */
212   PetscFunctionBegin;
213   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
214   PetscFunctionReturn(0);
215 }
216 
217 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
218 {
219   PetscFunctionBegin;
220   /*get sub-matrices based on PETSC_COMM_SELF */
221   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
222   PetscFunctionReturn(0);
223 }
224 
225 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
226 {
227   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
228   PetscInt          i,j,m = A->rmap->n;
229   const char        *name;
230   PetscViewerFormat format;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscObjectGetName((PetscObject)A,&name));
234   PetscCall(PetscViewerGetFormat(viewer,&format));
235   if (format == PETSC_VIEWER_ASCII_INFO) {
236     PetscFunctionReturn(0);
237   } else {
238     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
239     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
240     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241     for (i=0; i<m; i++) {
242       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
243       for (j=a->i[i]; j<a->i[i+1]; j++) {
244         if (a->values) {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
246         } else {
247           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
248         }
249       }
250       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
251     }
252     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
253     PetscCall(PetscViewerFlush(viewer));
254     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255   }
256   PetscFunctionReturn(0);
257 }
258 
259 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
260 {
261   PetscBool      iascii;
262 
263   PetscFunctionBegin;
264   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
265   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
266   PetscFunctionReturn(0);
267 }
268 
/* Destroys a MATMPIADJ matrix: releases the CSR arrays (honoring whether they were
   allocated with malloc/free or PetscMalloc/PetscFree), the scratch buffers, and
   unregisters the composed type-specific functions. */
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays came from plain malloc() (e.g. handed over by an external package),
         so they must be released with free().  NOTE(review): a->values is not freed
         on this branch — presumably never set when freeaijwithfree is true; verify. */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  /* remove the composed functions so the object can be retyped cleanly */
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeqRankZero_C",NULL));
  PetscFunctionReturn(0);
}
297 
298 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
299 {
300   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
301 
302   PetscFunctionBegin;
303   switch (op) {
304   case MAT_SYMMETRIC:
305   case MAT_STRUCTURALLY_SYMMETRIC:
306   case MAT_HERMITIAN:
307   case MAT_SPD:
308     a->symmetric = flg;
309     break;
310   case MAT_SYMMETRY_ETERNAL:
311   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
312   case MAT_SPD_ETERNAL:
313     break;
314   default:
315     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
316     break;
317   }
318   PetscFunctionReturn(0);
319 }
320 
/* MatGetRow() for MATMPIADJ.  Returns pointers into the matrix's own storage for
   the column indices, and synthesizes PetscScalar values (the stored integer edge
   weights, or 1.0 when no weights exist) in a reusable scratch buffer that is
   grown geometrically on demand.  The caller must pass the GLOBAL row number. */
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  /* convert global row number to local */
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the shared scratch buffer (at least doubling) if this row is longer
       than any previously requested one */
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    /* adjacency matrices store integer weights (or none); present them as scalars */
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
344 
/* MatRestoreRow() for MATMPIADJ: intentionally a no-op.  MatGetRow_MPIAdj hands out
   pointers into the matrix's own storage (a->j) and a persistent scratch buffer
   (a->rowvalues), so there is no per-call allocation to release. */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
350 
351 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
352 {
353   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
354   PetscBool      flag;
355 
356   PetscFunctionBegin;
357   /* If the  matrix dimensions are not equal,or no of nonzeros */
358   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
359     flag = PETSC_FALSE;
360   }
361 
362   /* if the a->i are the same */
363   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
364 
365   /* if a->j are the same */
366   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
367 
368   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
369   PetscFunctionReturn(0);
370 }
371 
/* MatGetRowIJ() for MATMPIADJ: exposes the matrix's own CSR arrays directly (no
   copy).  When oshift is nonzero the arrays are shifted IN PLACE to 1-based
   indexing; MatRestoreRowIJ_MPIAdj must be called to undo the shift. */
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: ja must be shifted while (*ia)[*m] still holds the 0-based
       nonzero count, before ia itself is incremented */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
391 
/* MatRestoreRowIJ() for MATMPIADJ: validates that the caller hands back the same
   arrays obtained from MatGetRowIJ and, when oshift was used, undoes the in-place
   1-based shift (ia first, so (*ia)[*m] is back to the true nonzero count before
   ja is walked). */
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    /* decrement ia first so (*ia)[*m] is the 0-based nonzero count for the ja loop */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
411 
412 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
413 {
414   Mat               B;
415   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
416   const PetscInt    *rj;
417   const PetscScalar *ra;
418   MPI_Comm          comm;
419 
420   PetscFunctionBegin;
421   PetscCall(MatGetSize(A,NULL,&N));
422   PetscCall(MatGetLocalSize(A,&m,NULL));
423   PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
424 
425   /* count the number of nonzeros per row */
426   for (i=0; i<m; i++) {
427     PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
428     for (j=0; j<len; j++) {
429       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
430     }
431     nzeros += len;
432     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
433   }
434 
435   /* malloc space for nonzeros */
436   PetscCall(PetscMalloc1(nzeros+1,&a));
437   PetscCall(PetscMalloc1(N+1,&ia));
438   PetscCall(PetscMalloc1(nzeros+1,&ja));
439 
440   nzeros = 0;
441   ia[0]  = 0;
442   for (i=0; i<m; i++) {
443     PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
444     cnt  = 0;
445     for (j=0; j<len; j++) {
446       if (rj[j] != i+rstart) { /* if not diagonal */
447         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
448         ja[nzeros+cnt++] = rj[j];
449       }
450     }
451     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
452     nzeros += cnt;
453     ia[i+1] = nzeros;
454   }
455 
456   PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
457   PetscCall(MatCreate(comm,&B));
458   PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
459   PetscCall(MatSetType(B,type));
460   PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));
461 
462   if (reuse == MAT_INPLACE_MATRIX) {
463     PetscCall(MatHeaderReplace(A,&B));
464   } else {
465     *newmat = B;
466   }
467   PetscFunctionReturn(0);
468 }
469 
/* MatSetValues() for MATMPIADJ.  Only the sparsity pattern is recorded (an
   adjacency matrix has no scalar entries): locally-owned (row,col) pairs are added
   to a hash set, while rows owned by other ranks are stashed for communication
   during assembly.  Negative row/column indices and diagonal entries are skipped.
   Must not be called after the matrix has been assembled (adj->i set). */
PetscErrorCode MatSetValues_MPIAdj(Mat A,PetscInt m,const PetscInt *rows,PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
  PetscInt   rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazily create the hash set and stash on first insertion */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject) A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;                 /* negative rows are ignored */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* off-process row: stash the whole row for delivery at assembly time */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        /* skip negative columns and diagonal entries (no self-edges) */
        if (key.j < 0 || key.i == key.j) continue;
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}
502 
/* MatAssemblyBegin() for MATMPIADJ: starts scattering stashed off-process rows to
   their owning ranks.  NOTE(review): if MatSetValues was never called, adj->ht is
   created here but A->stash is not — MatStashScatterBegin_Private is presumably
   safe on a zero-initialized stash; verify against MatStash internals. */
PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt   nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  /* ensure the hash set and layouts exist even when no values were set locally */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(0);
}
519 
/* MatAssemblyEnd() for MATMPIADJ: drains the stash (feeding received rows back
   through MatSetValues_MPIAdj so they land in the hash set), then converts the hash
   set into sorted CSR arrays (adj->i / adj->j) via a counting sort. */
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar    *val;
  PetscInt       *row, *col,m,rstart,*rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* receive stashed off-process entries and insert them locally */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
     if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row+i, ncols, col+i, val+i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* counting sort, pass 1: histogram entries per local row (offset by one) */
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
  PetscCall(PetscCalloc1(m+1,&rowstarts));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht,hi);
  }
  /* prefix sum turns the histogram into row start offsets */
  for (i=1; i<m+1; i++) {
    rowstarts[i] = rowstarts[i-1] + rowstarts[i];
  }

  /* pass 2: scatter the column indices into place (note: `col` is reused here,
     now pointing at a freshly allocated array, not the stash buffer) */
  PetscCall(PetscHSetIJGetSize(ht,&nz));
  PetscCall(PetscMalloc1(nz,&col));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht,hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* the scatter advanced each rowstarts[i] to the next row's start; shift the
     array back down by one slot to restore the row pointers */
  for (i=m; i>0; i--) {
    rowstarts[i] = rowstarts[i-1];
  }
  rowstarts[0] = 0;

  /* hash iteration order is arbitrary, so sort each row's column indices */
  for (PetscInt i=0; i<m; i++) {
    PetscCall(PetscSortInt(rowstarts[i+1]-rowstarts[i],&col[rowstarts[i]]));
  }

  /* the matrix takes ownership of the freshly built CSR arrays */
  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(0);
}
588 
589 /* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ.  Entries are POSITIONAL (the /*NN*／ markers give
   the slot index in struct _MatOps); unsupported operations are NULL.  Only the
   operations meaningful for an adjacency structure are provided: set values / row
   access, assembly, equality, the row IJ accessors, sub-matrix extraction,
   destroy/view/convert, and shift. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
741 
/* Type-specific implementation of MatMPIAdjSetPreallocation(): installs the
   caller-provided CSR arrays directly (the matrix TAKES OWNERSHIP — freeaij is set,
   so they will be PetscFree'd at destroy time), agrees collectively on whether edge
   weights are in use, validates the arrays in debug builds, and finalizes assembly. */
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Collective MAX so every rank agrees on edge-weight usage even if only some
     ranks passed a values array (PetscBool is int-sized, hence the casts) */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* row pointers must start at zero, be non-decreasing, and every column index
       must lie in [0, global column count) */
    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  /* take ownership of the caller's arrays */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  /* data is already in final CSR form, so bypass the hash-set assembly path and
     run the generic assembly bookkeeping directly */
  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(0);
}
781 
782 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
783 {
784   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
785   const PetscInt *ranges;
786   MPI_Comm       acomm,bcomm;
787   MPI_Group      agroup,bgroup;
788   PetscMPIInt    i,rank,size,nranks,*ranks;
789 
790   PetscFunctionBegin;
791   *B    = NULL;
792   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
793   PetscCallMPI(MPI_Comm_size(acomm,&size));
794   PetscCallMPI(MPI_Comm_size(acomm,&rank));
795   PetscCall(MatGetOwnershipRanges(A,&ranges));
796   for (i=0,nranks=0; i<size; i++) {
797     if (ranges[i+1] - ranges[i] > 0) nranks++;
798   }
799   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
800     PetscCall(PetscObjectReference((PetscObject)A));
801     *B   = A;
802     PetscFunctionReturn(0);
803   }
804 
805   PetscCall(PetscMalloc1(nranks,&ranks));
806   for (i=0,nranks=0; i<size; i++) {
807     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
808   }
809   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
810   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
811   PetscCall(PetscFree(ranks));
812   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
813   PetscCallMPI(MPI_Group_free(&agroup));
814   PetscCallMPI(MPI_Group_free(&bgroup));
815   if (bcomm != MPI_COMM_NULL) {
816     PetscInt   m,N;
817     Mat_MPIAdj *b;
818     PetscCall(MatGetLocalSize(A,&m,NULL));
819     PetscCall(MatGetSize(A,NULL,&N));
820     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
821     b          = (Mat_MPIAdj*)(*B)->data;
822     b->freeaij = PETSC_FALSE;
823     PetscCallMPI(MPI_Comm_free(&bcomm));
824   }
825   PetscFunctionReturn(0);
826 }
827 
/*
   Gathers the distributed adjacency structure of A onto every rank and creates a
   sequential (PETSC_COMM_SELF) MPIAdj matrix *B holding the complete graph on
   each process.  Collective; adj->i is shifted in place during the gather and
   restored before returning.
*/
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = global total of nonzeros (graph edges) across all ranks */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* gather each rank's nonzero count, then build the displacements for the allgatherv calls below */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
  if (adj->values) {
    /* edge weights are optional; gathered only when present */
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  /* nzstart = exclusive prefix sum of nz: number of nonzeros owned by lower ranks */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  /* gather each rank's local row count and compute displacements for the row pointers */
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  /* close the global row-pointer array */
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  /* *B takes ownership of II, J and Values (freed when *B is destroyed) */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
871 
/*
   Gathers the distributed adjacency structure of A onto rank 0 only and creates a
   sequential (PETSC_COMM_SELF) MPIAdj matrix *B there; *B is not set on other
   ranks.  Collective; every rank participates in the matched Gather/Gatherv
   calls, and adj->i is shifted in place and restored before returning.
*/
PetscErrorCode  MatMPIAdjToSeqRankZero_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz = NULL,*allm,size,*dispnz,*dispm,rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = global total of nonzeros (graph edges) across all ranks */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* counts and displacements are needed only on the root (rank 0) */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  if (!rank) PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Gather(&mnz,1,MPI_INT,allnz,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
  if (!rank) {
    /* root: build displacements and receive the column indices (and optional weights) */
    dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
    if (adj->values) {
      PetscCall(PetscMalloc1(NZ,&Values));
      PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
    }
    PetscCall(PetscMalloc1(NZ,&J));
    PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allnz,dispnz));
  } else {
    /* non-root: send only; receive arguments are ignored by MPI on non-root ranks */
    if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
  }
  /* nzstart = exclusive prefix sum of nz: number of nonzeros owned by lower ranks */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMPIIntCast(m,&mm));
  if (!rank) {
    /* root: gather row counts, build displacements, then receive the shifted row pointers */
    PetscCall(PetscMalloc1(M+1,&II));
    PetscCall(PetscMalloc2(size,&allm,size,&dispm));
    PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,allm,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
    dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
    PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
    PetscCall(PetscFree2(allm,dispm));
    /* close the global row-pointer array */
    II[M] = NZ;
  } else {
    PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,NULL,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
    PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
  }
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  /* rank 0 takes ownership of II, J and Values (freed when *B is destroyed) */
  if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
926 
927 /*@
928    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
929 
930    Collective
931 
932    Input Parameter:
933 .  A - original MPIAdj matrix
934 
935    Output Parameter:
936 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
937 
938    Level: developer
939 
940    Note:
941    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
942 
943    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
944 
945 .seealso: `MatCreateMPIAdj()`
946 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the implementation composed on A; errors if the method is absent */
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
954 
955 /*MC
956    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
957    intended for use constructing orderings and partitionings.
958 
959   Level: beginner
960 
961   Notes:
962     You can provide values to the matrix using MatMPIAdjSetPreallocation(), MatCreateMPIAdj(), or
963     by calling MatSetValues() and MatAssemblyBegin() followed by MatAssemblyEnd()
964 
965 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
966 M*/
967 
/* Type constructor for MATMPIADJ: allocates the Mat_MPIAdj data, installs the
   operation table, and composes the type-specific methods used by the public
   MatMPIAdjXXX() wrappers in this file. */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  /* install the MPIAdj function table defined earlier in this file */
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  /* compose the methods that PetscUseMethod()/PetscTryMethod() dispatch to */
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeqRankZero_C",MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
986 
987 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential partitioners)
989 
990    Logically Collective
991 
992    Input Parameter:
993 .  A - the matrix
994 
995    Output Parameter:
996 .  B - the same matrix on all processes
997 
998    Level: intermediate
999 
1000 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
1001 @*/
1002 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
1003 {
1004   PetscFunctionBegin;
1005   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 /*@C
   MatMPIAdjToSeqRankZero - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on rank zero (needed by sequential partitioners)
1011 
1012    Logically Collective
1013 
1014    Input Parameter:
1015 .  A - the matrix
1016 
1017    Output Parameter:
1018 .  B - the same matrix on rank zero, not set on other ranks
1019 
1020    Level: intermediate
1021 
   Notes:
     This routine is advantageous on systems with multiple ranks per node, since only one copy of the matrix
     is stored on the first node instead of one copy per rank. This can allow sequentially partitioning much larger
     parallel graphs.
1026 
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1028 @*/
1029 PetscErrorCode  MatMPIAdjToSeqRankZero(Mat A,Mat *B)
1030 {
1031   PetscFunctionBegin;
1032   PetscUseMethod(A,"MatMPIAdjToSeqRankZero_C",(Mat,Mat*),(A,B));
1033   PetscFunctionReturn(0);
1034 }
1035 
1036 /*@C
1037    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1038 
1039    Logically Collective
1040 
1041    Input Parameters:
1042 +  A - the matrix
1043 .  i - the indices into j for the start of each row
1044 .  j - the column indices for each row (sorted for each row).
1045        The indices in i and j start with zero (NOT with one).
1046 -  values - [optional] edge weights
1047 
1048    Level: intermediate
1049 
1050 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1051 @*/
1052 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
1053 {
1054   PetscFunctionBegin;
1055   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
1056   PetscFunctionReturn(0);
1057 }
1058 
1059 /*@C
1060    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1061    The matrix does not have numerical values associated with it, but is
1062    intended for ordering (to reduce bandwidth etc) and partitioning.
1063 
1064    Collective
1065 
1066    Input Parameters:
1067 +  comm - MPI communicator
1068 .  m - number of local rows
1069 .  N - number of global columns
1070 .  i - the indices into j for the start of each row
1071 .  j - the column indices for each row (sorted for each row).
1072        The indices in i and j start with zero (NOT with one).
1073 -  values -[optional] edge weights
1074 
1075    Output Parameter:
1076 .  A - the matrix
1077 
1078    Level: intermediate
1079 
1080    Notes:
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
1084 
1085    You should not include the matrix diagonals.
1086 
1087    If you already have a matrix, you can create its adjacency matrix by a call
1088    to MatConvert(), specifying a type of MATMPIADJ.
1089 
1090    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
1091 
1092 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1093 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  /* m local rows, N global columns; global rows and local columns are derived */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  /* hands ownership of i, j, values to *A (freed when the matrix is destroyed) */
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
1103