xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision daa037dfd3c3bec8dc8659548d2b20b07c1dc6de)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
 * Communication kernel shared by MatCreateSubMatrix (parallel sub-matrix) and
 * MatCreateSubMatrices (sequential sub-matrices) for MATMPIADJ.
 *
 * Gathers the rows of adj listed in irows (global indices), keeps only the
 * columns listed in icols, and returns the result as freshly allocated CSR
 * arrays.  Each output pointer may be NULL if the caller does not want it:
 *   sadj_xadj   - row pointers (length = local size of irows + 1)
 *   sadj_adjncy - surviving column indices (still in global numbering)
 *   sadj_values - edge weights, only when adj->useedgeweights, else NULL
 * Ownership of the returned arrays passes to the caller.
 */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the rank that owns that row */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* per-row nonzero counts on the owner side */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch, for every requested row, its length (ncols_recv) and its offset in the
     owner's adjncy array (xadj_recv) */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received row lengths into local CSR offsets */
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* second SF: one leaf per nonzero of the requested rows, to fetch the column
     indices (and optionally the edge weights) themselves */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* extra receive buffer when edge weights travel along with the column indices */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* drop entries whose column is not in icols: mark with -1 and shrink the
     per-row counts; icols must be sorted for PetscFindInt */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compress the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand ownership to the caller; free anything the caller did not request */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
 * Common driver for MatCreateSubMatrices (subcomm = PETSC_FALSE, sequential
 * results) and MatCreateSubMatricesMPI (subcomm = PETSC_TRUE, results on the
 * communicator of the index sets).  For each of the n (irow, icol) pairs it
 * extracts the CSR data via MatCreateSubMatrix_MPIAdj_data(), renumbers the
 * columns into the local numbering given by the sorted union of row and column
 * indices, and either creates a new MATMPIADJ (MAT_INITIAL_MATRIX) or copies
 * the data into the existing one (other MatReuse values).
 */
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  for (i=0; i<n; i++) {
    if (subcomm) {
      /* parallel case: the row and column IS must share a (non-self) communicator */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheckFalse(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheckFalse(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data (freshly allocated CSR arrays in global numbering)*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* build the sorted, duplicate-free union of row and column global indices;
       its positions define the submatrix's local column numbering */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheckFalse(loc<0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* reuse: copy into the existing matrix's arrays, then free our scratch copies */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheckFalse(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
206 
207 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
208 {
209   /*get sub-matrices across a sub communicator */
210   PetscFunctionBegin;
211   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
212   PetscFunctionReturn(0);
213 }
214 
215 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
216 {
217   PetscFunctionBegin;
218   /*get sub-matrices based on PETSC_COMM_SELF */
219   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
220   PetscFunctionReturn(0);
221 }
222 
223 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
224 {
225   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
226   PetscInt          i,j,m = A->rmap->n;
227   const char        *name;
228   PetscViewerFormat format;
229 
230   PetscFunctionBegin;
231   PetscCall(PetscObjectGetName((PetscObject)A,&name));
232   PetscCall(PetscViewerGetFormat(viewer,&format));
233   if (format == PETSC_VIEWER_ASCII_INFO) {
234     PetscFunctionReturn(0);
235   } else PetscCheckFalse(format == PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
236   else {
237     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
238     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
239     for (i=0; i<m; i++) {
240       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
241       for (j=a->i[i]; j<a->i[i+1]; j++) {
242         if (a->values) {
243           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
244         } else {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
246         }
247       }
248       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
249     }
250     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
251     PetscCall(PetscViewerFlush(viewer));
252     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
258 {
259   PetscBool      iascii;
260 
261   PetscFunctionBegin;
262   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
263   if (iascii) {
264     PetscCall(MatView_MPIAdj_ASCII(A,viewer));
265   }
266   PetscFunctionReturn(0);
267 }
268 
/*
   Destructor for MATMPIADJ.  The CSR arrays (i, j, values) are released only
   when this matrix owns them (a->freeaij); a->freeaijwithfree selects plain
   free() for arrays that were allocated outside PETSc (e.g. by a partitioning
   package) rather than PetscFree().
*/
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays came from plain malloc() outside PETSc */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
      /* NOTE(review): a->values is not released on this path -- presumably it is
         never set when freeaijwithfree is used; confirm with the producers */
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  /* drop the composed type-specific methods */
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscFunctionReturn(0);
}
295 
296 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
297 {
298   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
299 
300   PetscFunctionBegin;
301   switch (op) {
302   case MAT_SYMMETRIC:
303   case MAT_STRUCTURALLY_SYMMETRIC:
304   case MAT_HERMITIAN:
305     a->symmetric = flg;
306     break;
307   case MAT_SYMMETRY_ETERNAL:
308     break;
309   default:
310     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
311     break;
312   }
313   PetscFunctionReturn(0);
314 }
315 
/*
   Returns a view of one locally owned row (row is a GLOBAL index).  The column
   indices point directly into the stored j[] array; the values are staged in a
   lazily grown scratch buffer (a->rowvalues) because MATMPIADJ stores integer
   edge weights (or none), while callers expect PetscScalar.
*/
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;  /* convert to a local row index */
  PetscCheckFalse(row < 0 || row >= A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the scratch value buffer geometrically if this row does not fit */
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    for (j=0; j<*nz; j++) {
      /* unweighted matrices report a weight of 1.0 for every edge */
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
339 
/* Nothing to release: MatGetRow_MPIAdj hands out pointers into internal
   storage (j[] and the reusable a->rowvalues scratch buffer). */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
345 
346 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
347 {
348   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
349   PetscBool      flag;
350 
351   PetscFunctionBegin;
352   /* If the  matrix dimensions are not equal,or no of nonzeros */
353   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
354     flag = PETSC_FALSE;
355   }
356 
357   /* if the a->i are the same */
358   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
359 
360   /* if a->j are the same */
361   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
362 
363   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
364   PetscFunctionReturn(0);
365 }
366 
/*
   Hands out the raw CSR arrays of the local rows.  When oshift is nonzero the
   arrays are converted to 1-based indexing IN PLACE; the matching
   MatRestoreRowIJ() call must undo the shift before the arrays are used again.
   (symmetric and blockcompressed are ignored for this type.)
*/
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first while ia still holds 0-based offsets, then shift ia */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
386 
/*
   Counterpart of MatGetRowIJ_MPIAdj: verifies the caller passed back the very
   arrays it was given, and undoes the in-place 1-based shift when oshift is
   nonzero (ia is un-shifted first so (*ia)[*m] is again a 0-based count for
   the ja loop bound).
*/
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheckFalse(ia && a->i != *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheckFalse(ja && a->j != *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
406 
/*
   Converts an arbitrary matrix A into a MATMPIADJ adjacency matrix: diagonal
   entries are dropped, and each off-diagonal value is turned into an integer
   edge weight via |value| truncated to PetscInt.  With MAT_INPLACE_MATRIX the
   header of A is replaced; otherwise *newmat receives the new matrix, which
   takes ownership of the freshly allocated ia/ja/a arrays.
*/
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
  }

  /* malloc space for nonzeros */
  /* NOTE(review): ia only needs m+1 entries (local rows + 1); N+1 over-allocates
     when N > m -- confirm nothing relies on the extra space before shrinking */
  PetscCall(PetscMalloc1(nzeros+1,&a));
  PetscCall(PetscMalloc1(N+1,&ia));
  PetscCall(PetscMalloc1(nzeros+1,&ja));

  /* second pass: fill ja (columns) and a (integer weights), skipping the diagonal */
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCall(MatCreate(comm,&B));
  PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(B,type));
  /* B takes ownership of ia, ja, a */
  PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
464 
465 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Only the operations meaningful for an
   adjacency-structure matrix are implemented (row access, equality, viewing,
   submatrix extraction, row IJ access, conversion and destruction); the
   numeric kernels are left NULL.  The index comments track slot numbers in
   struct _MatOps.
*/
static struct _MatOps MatOps_Values = {NULL,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ NULL,
                                       NULL,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
615 
/*
   Type-specific implementation of MatMPIAdjSetPreallocation().  The matrix
   TAKES OWNERSHIP of the caller-allocated i (row pointers, length local
   rows + 1), j (column indices, global numbering) and optional values (edge
   weights) arrays: b->freeaij is set so MatDestroy_MPIAdj() frees them.
   Collective: all ranks must agree on whether edge weights are in use.
*/
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* make all ranks agree on edge-weight usage (max over the communicator:
     if any rank passed values, every rank treats the matrix as weighted) */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* validate the CSR structure: i starts at 0, is nondecreasing, and all
       column indices fall inside the global column range */
    PetscCheckFalse(i[0] != 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheckFalse(i[ii] < 0 || i[ii] < i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheckFalse(j[ii] < 0 || j[ii] >= B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;  /* we now own i, j, values */

  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
654 
655 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
656 {
657   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
658   const PetscInt *ranges;
659   MPI_Comm       acomm,bcomm;
660   MPI_Group      agroup,bgroup;
661   PetscMPIInt    i,rank,size,nranks,*ranks;
662 
663   PetscFunctionBegin;
664   *B    = NULL;
665   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
666   PetscCallMPI(MPI_Comm_size(acomm,&size));
667   PetscCallMPI(MPI_Comm_size(acomm,&rank));
668   PetscCall(MatGetOwnershipRanges(A,&ranges));
669   for (i=0,nranks=0; i<size; i++) {
670     if (ranges[i+1] - ranges[i] > 0) nranks++;
671   }
672   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
673     PetscCall(PetscObjectReference((PetscObject)A));
674     *B   = A;
675     PetscFunctionReturn(0);
676   }
677 
678   PetscCall(PetscMalloc1(nranks,&ranks));
679   for (i=0,nranks=0; i<size; i++) {
680     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
681   }
682   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
683   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
684   PetscCall(PetscFree(ranks));
685   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
686   PetscCallMPI(MPI_Group_free(&agroup));
687   PetscCallMPI(MPI_Group_free(&bgroup));
688   if (bcomm != MPI_COMM_NULL) {
689     PetscInt   m,N;
690     Mat_MPIAdj *b;
691     PetscCall(MatGetLocalSize(A,&m,NULL));
692     PetscCall(MatGetSize(A,NULL,&N));
693     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
694     b          = (Mat_MPIAdj*)(*B)->data;
695     b->freeaij = PETSC_FALSE;
696     PetscCallMPI(MPI_Comm_free(&bcomm));
697   }
698   PetscFunctionReturn(0);
699 }
700 
/*
   Gathers a distributed MATMPIADJ onto every rank as a sequential MATMPIADJ
   on PETSC_COMM_SELF (each rank gets the complete M x N structure).  The
   j[] and values[] arrays are concatenated with MPI_Allgatherv; the i[]
   row pointers are made globally consistent by temporarily adding each
   rank's starting nonzero offset (computed with MPI_Scan) before gathering,
   then restoring them afterwards.  B takes ownership of the gathered arrays.
*/
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheckFalse(adj->i[m] != nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = global number of nonzeros */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts and build displacements for Allgatherv */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
  if (adj->values) {
    /* edge weights are PetscInt, hence MPIU_INT */
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  /* nzstart = number of nonzeros on all previous ranks (exclusive prefix sum) */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  II[M] = NZ;  /* each rank contributed only i[0..m-1]; close the final row here */
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
744 
745 /*@
746    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
747 
748    Collective
749 
750    Input Parameter:
751 .  A - original MPIAdj matrix
752 
753    Output Parameter:
754 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
755 
756    Level: developer
757 
758    Note:
759    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
760 
761    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
762 
763 .seealso: MatCreateMPIAdj()
764 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the implementation composed on A in MatCreate_MPIAdj();
     PetscUseMethod errors if the method is not composed (unlike PetscTryMethod) */
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
772 
773 /*MC
774    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
775    intended for use constructing orderings and partitionings.
776 
777   Level: beginner
778 
779 .seealso: MatCreateMPIAdj
780 M*/
781 
/*
  MatCreate_MPIAdj - Type constructor for MATMPIADJ.

  Allocates the type-private Mat_MPIAdj data, installs the operation table, and
  composes the type-specific methods that the public wrappers (MatMPIAdjSetPreallocation,
  MatMPIAdjCreateNonemptySubcommMat, MatMPIAdjToSeq) dispatch to.
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  /* no values have been provided yet; MatMPIAdjSetPreallocation() completes assembly */
  B->assembled = PETSC_FALSE;

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
798 
799 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)
801 
802    Logically Collective
803 
804    Input Parameter:
805 .  A - the matrix
806 
807    Output Parameter:
808 .  B - the same matrix on all processes
809 
810    Level: intermediate
811 
812 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
813 @*/
814 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
815 {
816   PetscFunctionBegin;
817   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
818   PetscFunctionReturn(0);
819 }
820 
821 /*@C
822    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
823 
824    Logically Collective
825 
826    Input Parameters:
+  B - the matrix
828 .  i - the indices into j for the start of each row
829 .  j - the column indices for each row (sorted for each row).
830        The indices in i and j start with zero (NOT with one).
831 -  values - [optional] edge weights
832 
833    Level: intermediate
834 
835 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
836 @*/
837 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
838 {
839   PetscFunctionBegin;
840   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
841   PetscFunctionReturn(0);
842 }
843 
844 /*@C
845    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
846    The matrix does not have numerical values associated with it, but is
847    intended for ordering (to reduce bandwidth etc) and partitioning.
848 
849    Collective
850 
851    Input Parameters:
852 +  comm - MPI communicator
853 .  m - number of local rows
854 .  N - number of global columns
855 .  i - the indices into j for the start of each row
856 .  j - the column indices for each row (sorted for each row).
857        The indices in i and j start with zero (NOT with one).
858 -  values -[optional] edge weights
859 
860    Output Parameter:
861 .  A - the matrix
862 
863    Level: intermediate
864 
   Notes:
   This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.
872 
873    If you already have a matrix, you can create its adjacency matrix by a call
874    to MatConvert, specifying a type of MATMPIADJ.
875 
876    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
877 
878 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
879 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  /* m local rows and N global columns are specified; the global row count and
     local column count are derived by PETSc */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  /* hands ownership of i, j, values to *A; they are freed when *A is destroyed
     (see the Notes in the manual page above) */
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
889