xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 4ffacfe27a72f4cdf51b68a3bbb6aed96040fb2f)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
  Gather the rows of adj listed in irows (rows may be owned by other MPI processes) and
  keep only the columns listed in icols, returning the restricted sub-graph as raw
  CSR-style arrays.  Shared worker for MatCreateSubMatrix (parallel sub-matrix) and
  MatCreateSubMatrices (sequential sub-matrices).

  Output parameters (each may be NULL, in which case that array is freed here):
    sadj_xadj   - row offsets, length = local size of irows + 1
    sadj_adjncy - surviving column indices, still in adj's original global numbering
    sadj_values - edge weights, produced only when adj carries edge weights

  Ownership of the returned arrays transfers to the caller (release with PetscFree()).
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct sf graph: one leaf per requested row, rooted at the owning (rank, local row) */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  /* number of columns of each locally owned row (the per-root data to be fetched) */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  /* fetch, for every requested row, its column count and its starting offset in the owner's adjncy[] */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  /* prefix-sum the received counts into offsets for the incoming adjacency data */
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  /* second SF: one leaf per incoming nonzero, rooted at its position in the owner's adjncy[] */
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* edge weights, when present, travel alongside the adjacency data */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  /* drop columns not listed in icols: mark entry -1 and shrink the per-row count.
     NOTE(review): PetscFindInt binary-searches, so icols' indices are assumed sorted --
     confirm that all callers guarantee this */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  /* compact the surviving entries into the output CSR arrays */
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  /* row offsets from the (now reduced) per-row counts */
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand the arrays to the caller; free whatever was not requested */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}
134 
/*
  Common driver for MatCreateSubMatrices_MPIAdj (subcomm = PETSC_FALSE, one sequential
  sub-matrix per index-set pair) and MatCreateSubMatricesMPI_MPIAdj (subcomm = PETSC_TRUE,
  each sub-matrix living on the communicator of its row index set).

  For each pair (irow[i], icol[i]) the raw CSR data is gathered with
  MatCreateSubMatrix_MPIAdj_data() and the column indices are renumbered into the local
  numbering given by the sorted union of the row and column indices.  With
  MAT_INITIAL_MATRIX the arrays are handed to MatCreateMPIAdj() (which takes ownership);
  with reuse the data is copied into the existing sub-matrix and the temporaries freed.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  for (i=0; i<n; i++) {
    if (subcomm) {
      /* parallel sub-matrix: row and column ISs must share a communicator, and it
         cannot be PETSC_COMM_SELF */
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    /* build the sorted, deduplicated union of row and column indices; its positions
       define the sub-matrix's local column numbering */
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues */
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       /* reuse: copy into the existing sub-matrix (must live on the same comm) and
          release the temporaries */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}
206 
207 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
208 {
209   /*get sub-matrices across a sub communicator */
210   PetscFunctionBegin;
211   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
212   PetscFunctionReturn(0);
213 }
214 
215 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
216 {
217   PetscFunctionBegin;
218   /*get sub-matrices based on PETSC_COMM_SELF */
219   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
220   PetscFunctionReturn(0);
221 }
222 
223 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
224 {
225   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
226   PetscInt          i,j,m = A->rmap->n;
227   const char        *name;
228   PetscViewerFormat format;
229 
230   PetscFunctionBegin;
231   PetscCall(PetscObjectGetName((PetscObject)A,&name));
232   PetscCall(PetscViewerGetFormat(viewer,&format));
233   if (format == PETSC_VIEWER_ASCII_INFO) {
234     PetscFunctionReturn(0);
235   } else {
236     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
237     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
238     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
239     for (i=0; i<m; i++) {
240       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
241       for (j=a->i[i]; j<a->i[i+1]; j++) {
242         if (a->values) {
243           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
244         } else {
245           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
246         }
247       }
248       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
249     }
250     PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
251     PetscCall(PetscViewerFlush(viewer));
252     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
258 {
259   PetscBool      iascii;
260 
261   PetscFunctionBegin;
262   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
263   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
264   PetscFunctionReturn(0);
265 }
266 
267 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
268 {
269   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
270 
271   PetscFunctionBegin;
272 #if defined(PETSC_USE_LOG)
273   PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
274 #endif
275   PetscCall(PetscFree(a->diag));
276   if (a->freeaij) {
277     if (a->freeaijwithfree) {
278       if (a->i) free(a->i);
279       if (a->j) free(a->j);
280     } else {
281       PetscCall(PetscFree(a->i));
282       PetscCall(PetscFree(a->j));
283       PetscCall(PetscFree(a->values));
284     }
285   }
286   PetscCall(PetscFree(a->rowvalues));
287   PetscCall(PetscFree(mat->data));
288   PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
289   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
290   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
291   PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
292   PetscFunctionReturn(0);
293 }
294 
295 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
296 {
297   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
298 
299   PetscFunctionBegin;
300   switch (op) {
301   case MAT_SYMMETRIC:
302   case MAT_STRUCTURALLY_SYMMETRIC:
303   case MAT_HERMITIAN:
304     a->symmetric = flg;
305     break;
306   case MAT_SYMMETRY_ETERNAL:
307     break;
308   default:
309     PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
310     break;
311   }
312   PetscFunctionReturn(0);
313 }
314 
315 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
316 {
317   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
318 
319   PetscFunctionBegin;
320   row -= A->rmap->rstart;
321   PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
322   *nz = a->i[row+1] - a->i[row];
323   if (v) {
324     PetscInt j;
325     if (a->rowvalues_alloc < *nz) {
326       PetscCall(PetscFree(a->rowvalues));
327       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
328       PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
329     }
330     for (j=0; j<*nz; j++) {
331       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
332     }
333     *v = (*nz) ? a->rowvalues : NULL;
334   }
335   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
336   PetscFunctionReturn(0);
337 }
338 
339 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
340 {
341   PetscFunctionBegin;
342   PetscFunctionReturn(0);
343 }
344 
345 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
346 {
347   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
348   PetscBool      flag;
349 
350   PetscFunctionBegin;
351   /* If the  matrix dimensions are not equal,or no of nonzeros */
352   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
353     flag = PETSC_FALSE;
354   }
355 
356   /* if the a->i are the same */
357   PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));
358 
359   /* if a->j are the same */
360   PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));
361 
362   PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
363   PetscFunctionReturn(0);
364 }
365 
366 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
367 {
368   PetscInt   i;
369   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
370   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
371 
372   PetscFunctionBegin;
373   *m    = A->rmap->n;
374   *ia   = a->i;
375   *ja   = a->j;
376   *done = PETSC_TRUE;
377   if (oshift) {
378     for (i=0; i<(*ia)[*m]; i++) {
379       (*ja)[i]++;
380     }
381     for (i=0; i<=(*m); i++) (*ia)[i]++;
382   }
383   PetscFunctionReturn(0);
384 }
385 
386 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
387 {
388   PetscInt   i;
389   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
390   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
391 
392   PetscFunctionBegin;
393   PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
394   PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
395   if (oshift) {
396     PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
397     PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
398     for (i=0; i<=(*m); i++) (*ia)[i]--;
399     for (i=0; i<(*ia)[*m]; i++) {
400       (*ja)[i]--;
401     }
402   }
403   PetscFunctionReturn(0);
404 }
405 
/*
  Convert a generic matrix A into an adjacency matrix of the given (MPIAdj) type.
  Diagonal entries are dropped; each off-diagonal value is converted to an integer
  weight via |value|.  The freshly built ia/ja/a arrays are handed to
  MatMPIAdjSetPreallocation(), which takes ownership.
*/
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
  }

  /* malloc space for nonzeros */
  /* NOTE(review): ia is sized with the GLOBAL column count N+1 although only m+1
     entries are written below -- presumably deliberate slack, but verify; m+1 looks
     sufficient */
  PetscCall(PetscMalloc1(nzeros+1,&a));
  PetscCall(PetscMalloc1(N+1,&ia));
  PetscCall(PetscMalloc1(nzeros+1,&ja));

  /* second pass: fill ja with the off-diagonal columns and a with integer weights */
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCall(MatCreate(comm,&B));
  PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(B,type));
  /* B takes ownership of ia, ja, and a */
  PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
463 
/* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Slot positions follow struct _MatOps; the numbered
   comments are position markers.  Most operations are NULL because an adjacency matrix
   only supports structural queries (get/restore row, row IJ access), comparison,
   viewing, sub-matrix extraction, and conversion.
*/
static struct _MatOps MatOps_Values = {NULL,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ NULL,
                                       NULL,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*144*/NULL,
                                       NULL,
                                       NULL,
                                       NULL
};
614 
/*
  Attach user-provided CSR arrays (i = row offsets, j = column indices, optional
  values = integer edge weights) to a MATMPIADJ matrix.  The matrix takes OWNERSHIP of
  all three arrays (b->freeaij is set, and MatDestroy_MPIAdj frees them with PetscFree).
*/
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make sure every process agrees on whether edge weights are in use: any rank
     supplying values turns them on everywhere (MPI_MAX over 0/1) */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* sanity-check the CSR structure: offsets start at 0 and are non-decreasing,
       column indices are within the global column range.
       NOTE(review): the offset loop stops at ii < B->rmap->n, so the final offset
       i[n] is not monotonicity-checked -- confirm this is intentional */
    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      /* NOTE(review): the message prints the position ii where "Column index" is and
         the offending value j[ii] where the range would be expected -- arguments read
         oddly; verify intent */
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range %" PetscInt_FMT,ii,j[ii]);
    }
  }
  B->preallocated = PETSC_TRUE;

  /* take ownership of the caller's arrays */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
653 
654 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
655 {
656   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
657   const PetscInt *ranges;
658   MPI_Comm       acomm,bcomm;
659   MPI_Group      agroup,bgroup;
660   PetscMPIInt    i,rank,size,nranks,*ranks;
661 
662   PetscFunctionBegin;
663   *B    = NULL;
664   PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
665   PetscCallMPI(MPI_Comm_size(acomm,&size));
666   PetscCallMPI(MPI_Comm_size(acomm,&rank));
667   PetscCall(MatGetOwnershipRanges(A,&ranges));
668   for (i=0,nranks=0; i<size; i++) {
669     if (ranges[i+1] - ranges[i] > 0) nranks++;
670   }
671   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
672     PetscCall(PetscObjectReference((PetscObject)A));
673     *B   = A;
674     PetscFunctionReturn(0);
675   }
676 
677   PetscCall(PetscMalloc1(nranks,&ranks));
678   for (i=0,nranks=0; i<size; i++) {
679     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
680   }
681   PetscCallMPI(MPI_Comm_group(acomm,&agroup));
682   PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
683   PetscCall(PetscFree(ranks));
684   PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
685   PetscCallMPI(MPI_Group_free(&agroup));
686   PetscCallMPI(MPI_Group_free(&bgroup));
687   if (bcomm != MPI_COMM_NULL) {
688     PetscInt   m,N;
689     Mat_MPIAdj *b;
690     PetscCall(MatGetLocalSize(A,&m,NULL));
691     PetscCall(MatGetSize(A,NULL,&N));
692     PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
693     b          = (Mat_MPIAdj*)(*B)->data;
694     b->freeaij = PETSC_FALSE;
695     PetscCallMPI(MPI_Comm_free(&bcomm));
696   }
697   PetscFunctionReturn(0);
698 }
699 
/*
  Gather the entire distributed adjacency graph onto EVERY process as a new sequential
  MATMPIADJ matrix on PETSC_COMM_SELF.  The gathered II/J/Values arrays are handed to
  MatCreateMPIAdj(), which takes ownership.
*/
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  /* NZ = global number of nonzeros */
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  /* gather per-rank nonzero counts and build displacements for the Allgatherv calls.
     NOTE(review): counts/displacements are PetscMPIInt, so graphs with > INT_MAX
     nonzeros per rank would overflow here -- presumably out of scope; confirm */
  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
  /* gather edge weights (PetscInt, hence MPIU_INT) only if this matrix carries them */
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  /* nzstart = number of nonzeros on all previous ranks (exclusive prefix sum) */
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  /* the gathered rows supply only M offsets; close the CSR with the global nz count */
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}
743 
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Parameter:
.  A - original MPIAdj matrix

   Output Parameter:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the implementation composed on A under this key (set in MatCreate_MPIAdj);
     PetscUseMethod errors if A's type did not compose it */
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}
771 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

   Level: beginner

.seealso: `MatCreateMPIAdj()`
M*/
780 
781 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
782 {
783   Mat_MPIAdj     *b;
784 
785   PetscFunctionBegin;
786   PetscCall(PetscNewLog(B,&b));
787   B->data      = (void*)b;
788   PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
789   B->assembled = PETSC_FALSE;
790 
791   PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
792   PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
793   PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
794   PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
795   PetscFunctionReturn(0);
796 }
797 
798 /*@C
799    MatMPIAdjToSeq - Converts an parallel MPIAdj matrix to complete MPIAdj on each process (needed by sequential preconditioners)
800 
801    Logically Collective
802 
803    Input Parameter:
804 .  A - the matrix
805 
806    Output Parameter:
807 .  B - the same matrix on all processes
808 
809    Level: intermediate
810 
811 .seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
812 @*/
813 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
814 {
815   PetscFunctionBegin;
816   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
817   PetscFunctionReturn(0);
818 }
819 
/*@C
   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

   Logically Collective

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscFunctionBegin;
  /* PetscTryMethod: a no-op if B's type did not compose this method (set in MatCreate_MPIAdj) */
  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  PetscFunctionReturn(0);
}
842 
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
    This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  /* global row count is derived from the local m; column layout is determined from N */
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  /* hands ownership of i, j, values to the matrix (freed at MatDestroy; see Notes above) */
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}
888