xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 4fb89dddf56594b92bdd2ca7e24874fafe134f45)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
 * Extract the sub-graph of adj selected by the rows in irows and the columns
 * in icols, returning it in raw CSR form (sadj_xadj/sadj_adjncy, plus
 * sadj_values when edge weights are in use).
 *
 * Communication is done in two PetscSF rounds:
 *   1) pull each requested row's length and starting offset from its owner;
 *   2) pull the column indices (and weights) of those rows.
 * Columns not present in icols are then filtered out and the CSR arrays
 * compacted.
 *
 * Output arrays are allocated here; ownership passes to the caller for every
 * non-NULL output pointer (free with PetscFree()).  Serves both
 * MatCreateSubMatrix (parallel) and MatCreateSubMatrices (sequential).
 *
 * NOTE(review): the column filter uses PetscFindInt (binary search), which
 * assumes the icols indices are sorted — confirm callers guarantee this.
 */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the row's owner */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* local row lengths, to be broadcast to whoever requested each row */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  /* PETSC_OWN_POINTER: the SF takes ownership of iremote and frees it */
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* round 1: fetch the length and the starting offset of each requested row */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received row lengths into local CSR offsets */
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* build a second SF with one leaf per nonzero entry to be fetched */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      /* root index = position of this entry in the owner's adjncy array */
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* values_recv is only allocated (and only ever touched) when edge weights are in use */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* round 2: fetch the column indices (and optionally the weights) */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* mark entries whose column is not in icols with -1 and count the survivors */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;          /* shrink this row's surviving length */
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand results to the caller; free anything the caller did not ask for */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
 * Common driver for MatCreateSubMatrices (subcomm = PETSC_FALSE, sequential
 * submatrices on PETSC_COMM_SELF) and MatCreateSubMatricesMPI (subcomm =
 * PETSC_TRUE, parallel submatrices on the communicator of the index sets).
 *
 * For each requested (irow[i], icol[i]) pair the raw CSR data is gathered by
 * MatCreateSubMatrix_MPIAdj_data(), columns are renumbered into the local
 * numbering of the submatrix, and the result is either wrapped into a new
 * MPIAdj matrix (MAT_INITIAL_MATRIX, which takes ownership of the arrays) or
 * copied into an existing one (reuse path, arrays freed here).
 */
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory: the scratch buffer must
   * hold the concatenated row and column indices of the largest request.
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat for each (irow, icol) pair */
  for (i=0; i<n; i++) {
    if (subcomm) {
      /* parallel case: the IS communicators define where the submatrix lives */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data in raw CSR form (arrays allocated by the callee) */
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* build the sorted, de-duplicated set of global indices used for renumbering */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns: global column -> position in the sorted index set */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* the new matrix takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* reuse path: copy into the existing matrix and free the scratch arrays */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
206 
/* Get sub-matrices across a sub communicator: thin wrapper that selects the
   parallel (subcomm = PETSC_TRUE) path of the common driver. */
static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
  PetscFunctionReturn(0);
}
214 
/* Get sub-matrices based on PETSC_COMM_SELF: thin wrapper that selects the
   sequential (subcomm = PETSC_FALSE) path of the common driver. */
static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
  PetscFunctionReturn(0);
}
222 
/*
 * ASCII viewer for MPIAdj: prints each locally owned row as
 * "row <global row>: <col> ..." (or "(col, weight)" pairs when edge weights
 * are stored).  Uses synchronized printing so ranks emit their rows in order.
 * PETSC_VIEWER_ASCII_INFO prints nothing; MATLAB format is rejected.
 */
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A,&name));
  PetscCall(PetscViewerGetFormat(viewer,&format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
    /* tabs disabled so the synchronized output lines up across ranks */
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i=0; i<m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
    /* flush must happen between push and pop of the synchronized state */
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(0);
}
256 
257 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
258 {
259   PetscBool      iascii;
260 
261   PetscFunctionBegin;
262   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
263   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
264   PetscFunctionReturn(0);
265 }
266 
267 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
268 {
269   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
270 
271   PetscFunctionBegin;
272 #if defined(PETSC_USE_LOG)
273   PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
274 #endif
275   PetscCall(PetscFree(a->diag));
276   if (a->freeaij) {
277     if (a->freeaijwithfree) {
278       if (a->i) free(a->i);
279       if (a->j) free(a->j);
280     } else {
281       PetscCall(PetscFree(a->i));
282       PetscCall(PetscFree(a->j));
283       PetscCall(PetscFree(a->values));
284     }
285   }
286   PetscCall(PetscFree(a->rowvalues));
287   PetscCall(PetscFree(mat->data));
288   PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
289   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
290   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
292   PetscFunctionReturn(0);
293 }
294 
295 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
296 {
297   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
298 
299   PetscFunctionBegin;
300   switch (op) {
301   case MAT_SYMMETRIC:
302   case MAT_STRUCTURALLY_SYMMETRIC:
303   case MAT_HERMITIAN:
304     a->symmetric = flg;
305     break;
306   case MAT_SYMMETRY_ETERNAL:
307     break;
308   default:
309     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
310     break;
311   }
312   PetscFunctionReturn(0);
313 }
314 
/*
 * Return the column indices (and values) of one locally owned row.
 * idx points directly into the internal a->j array; values are copied into a
 * grow-only scratch buffer (a->rowvalues) that is reused across calls, so a
 * previous MatGetRow() result is invalidated by the next call.  Unweighted
 * matrices report every entry as 1.0.
 */
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  /* convert global row number to local */
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the scratch buffer geometrically so repeated calls rarely reallocate */
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
338 
/* Nothing to release: MatGetRow_MPIAdj hands out internal/scratch storage,
   so restore is intentionally a no-op. */
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
344 
345 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
346 {
347   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
348   PetscBool      flag;
349 
350   PetscFunctionBegin;
351   /* If the  matrix dimensions are not equal,or no of nonzeros */
352   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
353     flag = PETSC_FALSE;
354   }
355 
356   /* if the a->i are the same */
357   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
358 
359   /* if a->j are the same */
360   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
361 
362   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
363   PetscFunctionReturn(0);
364 }
365 
/*
 * Hand out the internal CSR arrays (no copy).  When oshift is nonzero the
 * internal arrays are shifted IN PLACE to 1-based indexing; the matching
 * MatRestoreRowIJ_MPIAdj must be called to shift them back before any other
 * operation uses the matrix.
 */
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: ja is shifted first, while (*ia)[*m] still holds the
       unshifted (0-based) total nonzero count used as the loop bound */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
385 
/*
 * Undo MatGetRowIJ_MPIAdj: verify the caller passed back the same internal
 * arrays, then (for oshift) shift them back to 0-based indexing in place.
 */
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    /* reverse order of MatGetRowIJ_MPIAdj: ia must be un-shifted first so
       (*ia)[*m] is the correct 0-based bound for the ja loop below */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
405 
406 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
407 {
408   Mat               B;
409   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
410   const PetscInt    *rj;
411   const PetscScalar *ra;
412   MPI_Comm          comm;
413 
414   PetscFunctionBegin;
415   PetscCall(MatGetSize(A,NULL,&N));
416   PetscCall(MatGetLocalSize(A,&m,NULL));
417   PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
418 
419   /* count the number of nonzeros per row */
420   for (i=0; i<m; i++) {
421     PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
422     for (j=0; j<len; j++) {
423       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
424     }
425     nzeros += len;
426     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
427   }
428 
429   /* malloc space for nonzeros */
430   PetscCall(PetscMalloc1(nzeros+1,&a));
431   PetscCall(PetscMalloc1(N+1,&ia));
432   PetscCall(PetscMalloc1(nzeros+1,&ja));
433 
434   nzeros = 0;
435   ia[0]  = 0;
436   for (i=0; i<m; i++) {
437     PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
438     cnt  = 0;
439     for (j=0; j<len; j++) {
440       if (rj[j] != i+rstart) { /* if not diagonal */
441         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
442         ja[nzeros+cnt++] = rj[j];
443       }
444     }
445     PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
446     nzeros += cnt;
447     ia[i+1] = nzeros;
448   }
449 
450   PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
451   PetscCall(MatCreate(comm,&B));
452   PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
453   PetscCall(MatSetType(B,type));
454   PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));
455 
456   if (reuse == MAT_INPLACE_MATRIX) {
457     PetscCall(MatHeaderReplace(A,&B));
458   } else {
459     *newmat = B;
460   }
461   PetscFunctionReturn(0);
462 }
463 
464 /* -------------------------------------------------------------------*/
/* -------------------------------------------------------------------*/
/* Function-pointer dispatch table for MATMPIADJ.  Entries are positional
   (slot numbers in the comments); a NULL slot means the operation is not
   supported by this matrix type. */
static struct _MatOps MatOps_Values = {NULL,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ NULL,
                                       NULL,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
616 
/*
 * Attach caller-provided CSR arrays (i = row pointers, j = column indices,
 * values = optional edge weights) to the matrix.  The matrix takes ownership
 * of the arrays (b->freeaij is set) and frees them with PetscFree() on
 * destroy.  In debug builds the CSR structure is validated first.
 */
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make everybody knows if they are using edge weights or not: MPI_MAX over
     the bool means any rank providing values forces weights on globally */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* validate: row pointers start at 0 and are nondecreasing; columns in range */
    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  B->preallocated = PETSC_TRUE;

  /* take ownership of the caller's arrays */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
655 
656 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
657 {
658   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
659   const PetscInt *ranges;
660   MPI_Comm       acomm,bcomm;
661   MPI_Group      agroup,bgroup;
662   PetscMPIInt    i,rank,size,nranks,*ranks;
663 
664   PetscFunctionBegin;
665   *B    = NULL;
666   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
667   PetscCallMPI(MPI_Comm_size(acomm,&size));
668   PetscCallMPI(MPI_Comm_size(acomm,&rank));
669   PetscCall(MatGetOwnershipRanges(A,&ranges));
670   for (i=0,nranks=0; i<size; i++) {
671     if (ranges[i+1] - ranges[i] > 0) nranks++;
672   }
673   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
674     PetscCall(PetscObjectReference((PetscObject)A));
675     *B   = A;
676     PetscFunctionReturn(0);
677   }
678 
679   PetscCall(PetscMalloc1(nranks,&ranks));
680   for (i=0,nranks=0; i<size; i++) {
681     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
682   }
683   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
684   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
685   PetscCall(PetscFree(ranks));
686   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
687   PetscCallMPI(MPI_Group_free(&agroup));
688   PetscCallMPI(MPI_Group_free(&bgroup));
689   if (bcomm != MPI_COMM_NULL) {
690     PetscInt   m,N;
691     Mat_MPIAdj *b;
692     PetscCall(MatGetLocalSize(A,&m,NULL));
693     PetscCall(MatGetSize(A,NULL,&N));
694     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
695     b          = (Mat_MPIAdj*)(*B)->data;
696     b->freeaij = PETSC_FALSE;
697     PetscCallMPI(MPI_Comm_free(&bcomm));
698   }
699   PetscFunctionReturn(0);
700 }
701 
/*
 * Gather a distributed MPIAdj matrix onto every rank as a sequential MPIAdj
 * on PETSC_COMM_SELF (each rank gets the full M x N graph).  The local i[]
 * offsets are temporarily shifted by the rank's global nonzero start so the
 * allgathered row pointers are globally consistent, then shifted back.
 */
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = global number of nonzeros */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts and build displacements for Allgatherv */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
  if (adj->values) {
    /* edge weights are PetscInt in MPIAdj, hence MPIU_INT here */
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  /* exclusive scan: nzstart = number of nonzeros on all lower ranks */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  II[M] = NZ;
  /* shift the i[] values back so A is left unmodified */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  /* the sequential matrix takes ownership of II, J, and Values */
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
745 
746 /*@
747    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
748 
749    Collective
750 
751    Input Parameter:
752 .  A - original MPIAdj matrix
753 
754    Output Parameter:
755 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
756 
757    Level: developer
758 
759    Note:
760    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
761 
762    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
763 
764 .seealso: `MatCreateMPIAdj()`
765 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A;
     PetscUseMethod errors if "MatMPIAdjCreateNonemptySubcommMat_C" was never composed */
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
773 
774 /*MC
775    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
776    intended for use constructing orderings and partitionings.
777 
778   Level: beginner
779 
780 .seealso: `MatCreateMPIAdj`
781 M*/
782 
/*
   MatCreate_MPIAdj - type constructor for MATMPIADJ; installs the per-type
   data structure, the function table, and the composed methods used by the
   public MatMPIAdj* wrappers in this file.
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  /* install the MATMPIADJ operations table (defined earlier in this file) */
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;

  /* compose the type-specific methods that the public wrappers dispatch to */
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}
799 
800 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to complete MPIAdj on each process (needed by sequential preconditioners)
802 
803    Logically Collective
804 
805    Input Parameter:
806 .  A - the matrix
807 
808    Output Parameter:
809 .  B - the same matrix on all processes
810 
811    Level: intermediate
812 
813 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
814 @*/
815 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
816 {
817   PetscFunctionBegin;
818   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
819   PetscFunctionReturn(0);
820 }
821 
822 /*@C
823    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
824 
825    Logically Collective
826 
827    Input Parameters:
828 +  A - the matrix
829 .  i - the indices into j for the start of each row
830 .  j - the column indices for each row (sorted for each row).
831        The indices in i and j start with zero (NOT with one).
832 -  values - [optional] edge weights
833 
834    Level: intermediate
835 
836 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
837 @*/
838 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
839 {
840   PetscFunctionBegin;
841   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
842   PetscFunctionReturn(0);
843 }
844 
845 /*@C
846    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
847    The matrix does not have numerical values associated with it, but is
848    intended for ordering (to reduce bandwidth etc) and partitioning.
849 
850    Collective
851 
852    Input Parameters:
853 +  comm - MPI communicator
854 .  m - number of local rows
855 .  N - number of global columns
856 .  i - the indices into j for the start of each row
857 .  j - the column indices for each row (sorted for each row).
858        The indices in i and j start with zero (NOT with one).
859 -  values -[optional] edge weights
860 
861    Output Parameter:
862 .  A - the matrix
863 
864    Level: intermediate
865 
866    Notes:
    This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
872    Should not include the matrix diagonals.
873 
874    If you already have a matrix, you can create its adjacency matrix by a call
875    to MatConvert, specifying a type of MATMPIADJ.
876 
877    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
878 
879 .seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`
880 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  /* standard PETSc creation sequence: create shell, set sizes, set type,
     then hand over the CSR arrays (A takes ownership of i, j, and values) */
  PetscCall(MatCreate(comm,A));
  /* local rows m, global columns N; the remaining sizes are derived */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
890