xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
  MatCreateSubMatrix_MPIAdj_data - Gathers the CSR data (row pointers, column indices and
  optional edge weights) of a sub-matrix of an MPIAdj matrix selected by row/column index sets.

  Because the rows listed in irows may be owned by other MPI ranks, the data is fetched with
  two PetscSF broadcast rounds:
    1) per-row nonzero counts and the owners' row offsets,
    2) the column indices (and values) themselves.
  Entries whose column is not in icols are then filtered out locally.

  Input Parameters:
+ adj   - the MPIAdj matrix
. irows - global row indices to extract (may reference off-rank rows)
- icols - global column indices to keep; assumed sorted since PetscFindInt() (binary search) is used

  Output Parameters:
+ sadj_xadj   - row-pointer array of the sub-matrix (pass NULL to discard)
. sadj_adjncy - column-index array of the sub-matrix (pass NULL to discard)
- sadj_values - edge weights, set to NULL when the matrix carries none (pass NULL to discard)

  The caller takes ownership of any returned arrays (allocated with PetscCalloc1()).
  Shared by MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices).
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the rank that owns it */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* number of nonzeros in each locally owned row */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* round 1: fetch the row lengths and the owners' row offsets for the requested rows */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* round 2: one leaf per incoming nonzero; the root index xadj_recv[i]+j addresses the
     owner's adjncy (and values) arrays directly */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* values_recv is only allocated/used when the matrix carries edge weights */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* drop entries whose column is not in icols; mark them with -1 and count survivors */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand arrays to the caller when requested, otherwise free them here */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
  MatCreateSubMatrices_MPIAdj_Private - Worker shared by MatCreateSubMatrices_MPIAdj and
  MatCreateSubMatricesMPI_MPIAdj: extracts n sub-matrices of an MPIAdj matrix.

  Input Parameters:
+ mat     - the MPIAdj matrix
. n       - number of sub-matrices to extract on this rank
. irow    - row index sets, one per sub-matrix
. icol    - column index sets, one per sub-matrix
. subcomm - PETSC_TRUE: build each sub-matrix on the communicator of its index sets
            (which must not be PETSC_COMM_SELF); PETSC_FALSE: build sequential
            sub-matrices on PETSC_COMM_SELF
- scall   - MAT_INITIAL_MATRIX creates new matrices; MAT_REUSE_MATRIX copies the new
            CSR data into the existing matrices in submat[]

  Output Parameter:
. submat - the sub-matrices

  The gathered column indices are global; they are renumbered to be local to the
  sub-matrix by binary search in the sorted union of each pair of index sets.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i=0; i<n; i++) {
    if (subcomm) {
      /* the row and column index sets must live on the same communicator, and a
         parallel extraction cannot run on PETSC_COMM_SELF */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* build the sorted, duplicate-free union of the row and column indices */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns: global index -> position in the sorted union */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* MAT_REUSE_MATRIX: copy into the existing matrix's arrays and free the temporaries */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
208 
/* MatCreateSubMatricesMPI implementation for MATMPIADJ: each sub-matrix lives on the
   (sub-)communicator of its index sets; simply forwards with subcomm = PETSC_TRUE. */
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  /*get sub-matrices across a sub communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
  PetscFunctionReturn(0);
}
216 
/* MatCreateSubMatrices implementation for MATMPIADJ: sequential sub-matrices on
   PETSC_COMM_SELF; simply forwards with subcomm = PETSC_FALSE. */
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  /*get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
  PetscFunctionReturn(0);
}
224 
225 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
226 {
227   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
228   PetscInt          i,j,m = A->rmap->n;
229   const char        *name;
230   PetscViewerFormat format;
231 
232   PetscFunctionBegin;
233   PetscCall(PetscObjectGetName((PetscObject)A,&name));
234   PetscCall(PetscViewerGetFormat(viewer,&format));
235   if (format == PETSC_VIEWER_ASCII_INFO) {
236     PetscFunctionReturn(0);
237   } else {
238     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
239     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
240     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
241     for (i=0; i<m; i++) {
242       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
243       for (j=a->i[i]; j<a->i[i+1]; j++) {
244         if (a->values) {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
246         } else {
247           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
248         }
249       }
250       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
251     }
252     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
253     PetscCall(PetscViewerFlush(viewer));
254     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
255   }
256   PetscFunctionReturn(0);
257 }
258 
259 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
260 {
261   PetscBool      iascii;
262 
263   PetscFunctionBegin;
264   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
265   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
266   PetscFunctionReturn(0);
267 }
268 
/* MatDestroy implementation for MATMPIADJ: releases the CSR arrays (honoring how they
   were allocated), scratch buffers, and the composed function slots. */
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays were obtained with malloc() (e.g. from an external package), so use free();
         NOTE(review): a->values is not freed on this path — presumably it is never set when
         freeaijwithfree is true; confirm against the setters */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeqRankZero_C",NULL));
  PetscFunctionReturn(0);
}
297 
298 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
299 {
300   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
301 
302   PetscFunctionBegin;
303   switch (op) {
304   case MAT_SYMMETRIC:
305   case MAT_STRUCTURALLY_SYMMETRIC:
306   case MAT_HERMITIAN:
307   case MAT_SPD:
308     a->symmetric = flg;
309     break;
310   case MAT_SYMMETRY_ETERNAL:
311   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
312   case MAT_SPD_ETERNAL:
313     break;
314   default:
315     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
316     break;
317   }
318   PetscFunctionReturn(0);
319 }
320 
/* MatGetRow implementation for MATMPIADJ: returns the column indices of one locally owned
   (global) row and, when values are requested, synthesizes PetscScalar values from the
   integer edge weights (1.0 per entry when the matrix has no weights).
   The values are staged in a per-matrix scratch buffer that is grown geometrically, so
   only one row may be "gotten" at a time. */
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;  /* convert the global row number to a local one */
  PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the scratch buffer at least geometrically to amortize reallocation */
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
344 
/* MatRestoreRow implementation for MATMPIADJ: a no-op, since MatGetRow_MPIAdj hands out
   pointers into the matrix's own arrays and its reusable scratch buffer. */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
350 
351 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
352 {
353   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
354   PetscBool      flag;
355 
356   PetscFunctionBegin;
357   /* If the  matrix dimensions are not equal,or no of nonzeros */
358   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
359     flag = PETSC_FALSE;
360   }
361 
362   /* if the a->i are the same */
363   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
364 
365   /* if a->j are the same */
366   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
367 
368   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
369   PetscFunctionReturn(0);
370 }
371 
/* MatGetRowIJ implementation for MATMPIADJ: hands out the internal CSR arrays directly.
   When oshift is nonzero the arrays are shifted IN PLACE to 1-based indexing, so the
   caller must balance this with MatRestoreRowIJ (which shifts back). The symmetric and
   blockcompressed arguments are ignored. */
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: its length (*ia)[*m] must be read before ia is incremented */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
391 
/* MatRestoreRowIJ implementation for MATMPIADJ: verifies the caller returns the same
   arrays obtained from MatGetRowIJ and, when oshift is nonzero, undoes the in-place
   shift to restore 0-based indexing. */
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    /* un-shift ia first so (*ia)[*m] is again the 0-based nonzero count used to bound ja */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
411 
412 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
413 {
414   Mat               B;
415   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
416   const PetscInt    *rj;
417   const PetscScalar *ra;
418   MPI_Comm          comm;
419 
420   PetscFunctionBegin;
421   PetscCall(MatGetSize(A,NULL,&N));
422   PetscCall(MatGetLocalSize(A,&m,NULL));
423   PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
424 
425   /* count the number of nonzeros per row */
426   for (i=0; i<m; i++) {
427     PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
428     for (j=0; j<len; j++) {
429       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
430     }
431     nzeros += len;
432     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
433   }
434 
435   /* malloc space for nonzeros */
436   PetscCall(PetscMalloc1(nzeros+1,&a));
437   PetscCall(PetscMalloc1(N+1,&ia));
438   PetscCall(PetscMalloc1(nzeros+1,&ja));
439 
440   nzeros = 0;
441   ia[0]  = 0;
442   for (i=0; i<m; i++) {
443     PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
444     cnt  = 0;
445     for (j=0; j<len; j++) {
446       if (rj[j] != i+rstart) { /* if not diagonal */
447         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
448         ja[nzeros+cnt++] = rj[j];
449       }
450     }
451     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
452     nzeros += cnt;
453     ia[i+1] = nzeros;
454   }
455 
456   PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
457   PetscCall(MatCreate(comm,&B));
458   PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
459   PetscCall(MatSetType(B,type));
460   PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));
461 
462   if (reuse == MAT_INPLACE_MATRIX) {
463     PetscCall(MatHeaderReplace(A,&B));
464   } else {
465     *newmat = B;
466   }
467   PetscFunctionReturn(0);
468 }
469 
/* MatSetValues implementation for MATMPIADJ: only the nonzero STRUCTURE is recorded.
   The numerical values and the insert mode are ignored; locally owned entries go into a
   hash set of (row,col) keys, off-rank rows are staged in the matrix stash for delivery
   during assembly. Negative row/column indices and diagonal entries are skipped.
   Only valid before the matrix is assembled (adj->i is still NULL). */
PetscErrorCode MatSetValues_MPIAdj(Mat A,PetscInt m,const PetscInt *rows,PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
  PetscInt   rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  /* lazily create the hash set and stash on first insertion */
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject) A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;   /* negative rows are ignored */
    if ((key.i < rStart) || (key.i >= rEnd)) {
      /* off-rank row: stash the whole row for the owning rank */
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;   /* skip negative columns and self edges */
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}
502 
/* MatAssemblyBegin implementation for MATMPIADJ: starts communicating the stashed
   off-rank entries to their owning ranks. Creates the hash set lazily in case no
   MatSetValues call happened on this rank. */
PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt   nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(0);
}
519 
/* MatAssemblyEnd implementation for MATMPIADJ: receives the stashed off-rank entries,
   folds them into the hash set via MatSetValues, then converts the hash set into the
   final CSR arrays (adj->i, adj->j) with a counting sort over local rows, sorting the
   column indices within each row. */
PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar    *val;
  PetscInt       *row, *col,m,rstart,*rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  /* drain the stash: each message is a batch of (row,col,val) triples from other ranks */
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row+i, ncols, col+i, val+i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  /* counting sort, pass 1: rowstarts[r+1] counts the entries of local row r */
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
  PetscCall(PetscCalloc1(m+1,&rowstarts));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht,hi);
  }
  /* prefix sum turns the counts into row offsets */
  for (i=1; i<m+1; i++) {
    rowstarts[i] = rowstarts[i-1] + rowstarts[i];
  }

  /* pass 2: scatter the column indices into place (this advances rowstarts[r] by one
     per entry, so the offsets are restored by the shift-down loop below) */
  PetscCall(PetscHSetIJGetSize(ht,&nz));
  PetscCall(PetscMalloc1(nz,&col));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht,hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  /* undo the advancement: shift offsets down one slot and reset rowstarts[0] */
  for (i=m; i>0; i--) {
    rowstarts[i] = rowstarts[i-1];
  }
  rowstarts[0] = 0;

  /* sort the column indices within each row */
  for (PetscInt i=0; i<m; i++) {
    PetscCall(PetscSortInt(rowstarts[i+1]-rowstarts[i],&col[rowstarts[i]]));
  }

  /* the matrix takes ownership of the freshly built CSR arrays */
  adj->i       = rowstarts;
  adj->j       = col;
  adj->nz      = rowstarts[m];
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(0);
}
588 
589 /* -------------------------------------------------------------------*/
/* Function dispatch table for MATMPIADJ. The numeric comments give the slot index in
   struct _MatOps; NULL slots are operations this matrix type does not implement. */
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*150*/ NULL
};
742 
743 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
744 {
745   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
746   PetscBool       useedgeweights;
747 
748   PetscFunctionBegin;
749   PetscCall(PetscLayoutSetUp(B->rmap));
750   PetscCall(PetscLayoutSetUp(B->cmap));
751   if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
752   /* Make everybody knows if they are using edge weights or not */
753   PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));
754 
755   if (PetscDefined(USE_DEBUG)) {
756     PetscInt ii;
757 
758     PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
759     for (ii=1; ii<B->rmap->n; ii++) {
760       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
761     }
762     for (ii=0; ii<i[B->rmap->n]; ii++) {
763       PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
764     }
765   }
766   b->j      = j;
767   b->i      = i;
768   b->values = values;
769 
770   b->nz        = i[B->rmap->n];
771   b->diag      = NULL;
772   b->symmetric = PETSC_FALSE;
773   b->freeaij   = PETSC_TRUE;
774 
775   B->ops->assemblybegin = NULL;
776   B->ops->assemblyend   = NULL;
777   PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
778   PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
779   PetscCall(MatStashDestroy_Private(&B->stash));
780   PetscFunctionReturn(0);
781 }
782 
783 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
784 {
785   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
786   const PetscInt *ranges;
787   MPI_Comm       acomm,bcomm;
788   MPI_Group      agroup,bgroup;
789   PetscMPIInt    i,rank,size,nranks,*ranks;
790 
791   PetscFunctionBegin;
792   *B    = NULL;
793   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
794   PetscCallMPI(MPI_Comm_size(acomm,&size));
795   PetscCallMPI(MPI_Comm_size(acomm,&rank));
796   PetscCall(MatGetOwnershipRanges(A,&ranges));
797   for (i=0,nranks=0; i<size; i++) {
798     if (ranges[i+1] - ranges[i] > 0) nranks++;
799   }
800   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
801     PetscCall(PetscObjectReference((PetscObject)A));
802     *B   = A;
803     PetscFunctionReturn(0);
804   }
805 
806   PetscCall(PetscMalloc1(nranks,&ranks));
807   for (i=0,nranks=0; i<size; i++) {
808     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
809   }
810   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
811   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
812   PetscCall(PetscFree(ranks));
813   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
814   PetscCallMPI(MPI_Group_free(&agroup));
815   PetscCallMPI(MPI_Group_free(&bgroup));
816   if (bcomm != MPI_COMM_NULL) {
817     PetscInt   m,N;
818     Mat_MPIAdj *b;
819     PetscCall(MatGetLocalSize(A,&m,NULL));
820     PetscCall(MatGetSize(A,NULL,&N));
821     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
822     b          = (Mat_MPIAdj*)(*B)->data;
823     b->freeaij = PETSC_FALSE;
824     PetscCallMPI(MPI_Comm_free(&bcomm));
825   }
826   PetscFunctionReturn(0);
827 }
828 
829 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
830 {
831   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
832   PetscInt       *Values = NULL;
833   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
834   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
835 
836   PetscFunctionBegin;
837   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
838   PetscCall(MatGetSize(A,&M,&N));
839   PetscCall(MatGetLocalSize(A,&m,NULL));
840   nz   = adj->nz;
841   PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
842   PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
843 
844   PetscCall(PetscMPIIntCast(nz,&mnz));
845   PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
846   PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
847   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
848   if (adj->values) {
849     PetscCall(PetscMalloc1(NZ,&Values));
850     PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
851   }
852   PetscCall(PetscMalloc1(NZ,&J));
853   PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
854   PetscCall(PetscFree2(allnz,dispnz));
855   PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
856   nzstart -= nz;
857   /* shift the i[] values so they will be correct after being received */
858   for (i=0; i<m; i++) adj->i[i] += nzstart;
859   PetscCall(PetscMalloc1(M+1,&II));
860   PetscCall(PetscMPIIntCast(m,&mm));
861   PetscCall(PetscMalloc2(size,&allm,size,&dispm));
862   PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
863   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
864   PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
865   PetscCall(PetscFree2(allm,dispm));
866   II[M] = NZ;
867   /* shift the i[] values back */
868   for (i=0; i<m; i++) adj->i[i] -= nzstart;
869   PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
870   PetscFunctionReturn(0);
871 }
872 
873 PetscErrorCode  MatMPIAdjToSeqRankZero_MPIAdj(Mat A,Mat *B)
874 {
875   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
876   PetscInt       *Values = NULL;
877   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
878   PetscMPIInt    mnz,mm,*allnz = NULL,*allm,size,*dispnz,*dispm,rank;
879 
880   PetscFunctionBegin;
881   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
882   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank));
883   PetscCall(MatGetSize(A,&M,&N));
884   PetscCall(MatGetLocalSize(A,&m,NULL));
885   nz   = adj->nz;
886   PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
887   PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
888 
889   PetscCall(PetscMPIIntCast(nz,&mnz));
890   if (!rank) PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
891   PetscCallMPI(MPI_Gather(&mnz,1,MPI_INT,allnz,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
892   if (!rank) {
893     dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
894     if (adj->values) {
895       PetscCall(PetscMalloc1(NZ,&Values));
896       PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
897     }
898     PetscCall(PetscMalloc1(NZ,&J));
899     PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
900     PetscCall(PetscFree2(allnz,dispnz));
901   } else {
902     if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
903     PetscCallMPI(MPI_Gatherv(adj->j,mnz,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
904   }
905   PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
906   nzstart -= nz;
907   /* shift the i[] values so they will be correct after being received */
908   for (i=0; i<m; i++) adj->i[i] += nzstart;
909   PetscCall(PetscMPIIntCast(m,&mm));
910   if (!rank) {
911     PetscCall(PetscMalloc1(M+1,&II));
912     PetscCall(PetscMalloc2(size,&allm,size,&dispm));
913     PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,allm,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
914     dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
915     PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
916     PetscCall(PetscFree2(allm,dispm));
917     II[M] = NZ;
918   } else {
919     PetscCallMPI(MPI_Gather(&mm,1,MPI_INT,NULL,1,MPI_INT,0,PetscObjectComm((PetscObject)A)));
920     PetscCallMPI(MPI_Gatherv(adj->i,mm,MPIU_INT,NULL,NULL,NULL,MPIU_INT,0,PetscObjectComm((PetscObject)A)));
921   }
922    /* shift the i[] values back */
923   for (i=0; i<m; i++) adj->i[i] -= nzstart;
924   if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
925   PetscFunctionReturn(0);
926 }
927 
928 /*@
929    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
930 
931    Collective
932 
933    Input Parameter:
934 .  A - original MPIAdj matrix
935 
936    Output Parameter:
937 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
938 
939    Level: developer
940 
941    Note:
942    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
943 
944    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
945 
946 .seealso: `MatCreateMPIAdj()`
947 @*/
/* Public entry point: validates A and dispatches to the type-specific implementation
   composed on the object as "MatMPIAdjCreateNonemptySubcommMat_C" (installed in MatCreate_MPIAdj()). */
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
955 
956 /*MC
957    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
958    intended for use constructing orderings and partitionings.
959 
960   Level: beginner
961 
962   Notes:
963     You can provide values to the matrix using MatMPIAdjSetPreallocation(), MatCreateMPIAdj(), or
964     by calling MatSetValues() and MatAssemblyBegin() followed by MatAssemblyEnd()
965 
966 .seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
967 M*/
968 
/* Type constructor for MATMPIADJ: allocates the per-matrix data, installs the
   operation table, and composes the type-specific methods dispatched by the
   public wrappers (MatMPIAdjSetPreallocation(), MatMPIAdjToSeq(), etc.). */
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  /* install the MATMPIADJ function table defined earlier in this file */
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  /* compose the implementations looked up by PetscUseMethod()/PetscTryMethod() in the public API */
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeqRankZero_C",MatMPIAdjToSeqRankZero_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
987 
988 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential partitioners)
990 
991    Logically Collective
992 
993    Input Parameter:
994 .  A - the matrix
995 
996    Output Parameter:
997 .  B - the same matrix on all processes
998 
999    Level: intermediate
1000 
1001 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
1002 @*/
1003 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
1004 {
1005   PetscFunctionBegin;
1006   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
1007   PetscFunctionReturn(0);
1008 }
1009 
1010 /*@C
   MatMPIAdjToSeqRankZero - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on rank zero (needed by sequential partitioners)
1012 
1013    Logically Collective
1014 
1015    Input Parameter:
1016 .  A - the matrix
1017 
1018    Output Parameter:
1019 .  B - the same matrix on rank zero, not set on other ranks
1020 
1021    Level: intermediate
1022 
1023    Notes:
     This routine is advantageous on systems with multiple ranks per node since only one copy of the matrix
     is stored on the first node, instead of one copy per rank. This can allow sequentially partitioning a much
     larger parallel graph.
1027 
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1029 @*/
1030 PetscErrorCode  MatMPIAdjToSeqRankZero(Mat A,Mat *B)
1031 {
1032   PetscFunctionBegin;
1033   PetscUseMethod(A,"MatMPIAdjToSeqRankZero_C",(Mat,Mat*),(A,B));
1034   PetscFunctionReturn(0);
1035 }
1036 
1037 /*@C
1038    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
1039 
1040    Logically Collective
1041 
1042    Input Parameters:
1043 +  A - the matrix
1044 .  i - the indices into j for the start of each row
1045 .  j - the column indices for each row (sorted for each row).
1046        The indices in i and j start with zero (NOT with one).
1047 -  values - [optional] edge weights
1048 
1049    Level: intermediate
1050 
1051 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1052 @*/
1053 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
1054 {
1055   PetscFunctionBegin;
1056   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 /*@C
1061    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1062    The matrix does not have numerical values associated with it, but is
1063    intended for ordering (to reduce bandwidth etc) and partitioning.
1064 
1065    Collective
1066 
1067    Input Parameters:
1068 +  comm - MPI communicator
1069 .  m - number of local rows
1070 .  N - number of global columns
1071 .  i - the indices into j for the start of each row
1072 .  j - the column indices for each row (sorted for each row).
1073        The indices in i and j start with zero (NOT with one).
1074 -  values -[optional] edge weights
1075 
1076    Output Parameter:
1077 .  A - the matrix
1078 
1079    Level: intermediate
1080 
1081    Notes:
1082    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
1083    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
1084    call from Fortran you need not create the arrays with PetscMalloc().
1085 
1086    You should not include the matrix diagonals.
1087 
1088    If you already have a matrix, you can create its adjacency matrix by a call
1089    to MatConvert(), specifying a type of MATMPIADJ.
1090 
1091    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
1092 
1093 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1094 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  /* row count is given locally (m), column count globally (N); the remaining sizes are derived */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  /* ownership of i, j, values passes to the matrix; see the Notes in the manual page above */
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
1104