xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 030f984af8d8bb4c203755d35bded3c05b3d83ce)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
/*
   Gathers the rows listed in irows (which may live on other ranks) and keeps only the
   columns listed in icols, producing compressed CSR-style output arrays that the caller
   owns.  Communication is done in two PetscSF broadcast rounds:
     round 1: owners send, per requested row, its column count and its xadj offset;
     round 2: owners send the adjacency entries (and edge weights, if present) themselves.
   Output (each may be NULL if the caller does not want it):
     sadj_xadj   - row-offset array of length nlrows_is+1
     sadj_adjncy - column indices, filtered to those present in icols
     sadj_values - matching edge weights, or NULL when the matrix has none
*/
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscMalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct sf graph: one leaf per requested row, rooted at the row's owning rank */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  /* per-local-row column counts to be broadcast to the requesting ranks */
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  /* sf takes ownership of iremote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* round 1: fetch the column count and the xadj offset of every requested row */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* prefix-sum the received counts into local offsets */
  Ncols_recv =0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  /* second sf graph: one leaf per adjacency entry of every requested row */
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;   /* offset into the owner's adjncy[] */
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /*if we need to deal with edge weights ???*/
  if (a->useedgeweights) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* round 2: fetch the adjacency entries (and weights) of every requested row */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);CHKERRQ(ierr);
  if (a->useedgeweights) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  /* mark entries whose column is not in icols with -1 and count the survivors;
     NOTE(review): PetscFindInt does a binary search, so icols must be sorted — confirm at callers */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  /* compress the surviving entries into the output arrays */
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (a->useedgeweights) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  /* hand ownership of each output array to the caller, or free it if unwanted */
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
  } else {
    if (a->useedgeweights) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (a->useedgeweights) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
135 
/*
   Builds n sub-matrices of mat, one per (irow[i], icol[i]) pair.  With subcomm=PETSC_TRUE
   each sub-matrix lives on the (parallel) communicator of its index sets; otherwise every
   sub-matrix is sequential on PETSC_COMM_SELF.  With scall=MAT_INITIAL_MATRIX the arrays
   produced by MatCreateSubMatrix_MPIAdj_data are handed to MatCreateMPIAdj (which takes
   ownership); with MAT_REUSE_MATRIX they are copied into the existing sub-matrix and freed.
*/
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  /* scratch buffer reused by every iteration; sized for the largest pair of index sets */
  ierr = PetscMalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct a submat */
  for (i=0; i<n; i++) {
    if (subcomm) {
      /* row and column index sets must share a communicator, and it must be parallel */
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRMPI(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRMPI(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP," can not use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /*get sub-matrix data*/
    sxadj=NULL; sadjncy=NULL; svalues=NULL;
    ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    /* collect the union of row and column indices so columns can be renumbered locally */
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscArraycpy(indices,irow_indices,irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscArraycpy(indices+irow_n,icol_indices,icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns: replace each global column by its position in the sorted union */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"can not find col %D",sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      /* MatCreateMPIAdj takes ownership of sxadj/sadjncy/svalues; do not free them here */
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
       /* reuse: copy into the existing matrix (which must be on the same comm), then free */
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
       ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRMPI(ierr);
       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix  must have the same comm as the col index set\n");
       ierr = PetscArraycpy(sa->i,sxadj,irow_n+1);CHKERRQ(ierr);
       ierr = PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);CHKERRQ(ierr);
       if (svalues) {ierr = PetscArraycpy(sa->values,svalues,sxadj[irow_n]);CHKERRQ(ierr);}
       ierr = PetscFree(sxadj);CHKERRQ(ierr);
       ierr = PetscFree(sadjncy);CHKERRQ(ierr);
       if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
208 
209 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
210 {
211   PetscErrorCode     ierr;
212   /*get sub-matrices across a sub communicator */
213   PetscFunctionBegin;
214   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
215   PetscFunctionReturn(0);
216 }
217 
218 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
219 {
220   PetscErrorCode     ierr;
221 
222   PetscFunctionBegin;
223   /*get sub-matrices based on PETSC_COMM_SELF */
224   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
225   PetscFunctionReturn(0);
226 }
227 
228 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
229 {
230   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
231   PetscErrorCode    ierr;
232   PetscInt          i,j,m = A->rmap->n;
233   const char        *name;
234   PetscViewerFormat format;
235 
236   PetscFunctionBegin;
237   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
238   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
239   if (format == PETSC_VIEWER_ASCII_INFO) {
240     PetscFunctionReturn(0);
241   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
242   else {
243     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
244     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
245     for (i=0; i<m; i++) {
246       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
247       for (j=a->i[i]; j<a->i[i+1]; j++) {
248         if (a->values) {
249           ierr = PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j], a->values[j]);CHKERRQ(ierr);
250         } else {
251           ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
252         }
253       }
254       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
255     }
256     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
257     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
258     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
259   }
260   PetscFunctionReturn(0);
261 }
262 
263 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
264 {
265   PetscErrorCode ierr;
266   PetscBool      iascii;
267 
268   PetscFunctionBegin;
269   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
270   if (iascii) {
271     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
272   }
273   PetscFunctionReturn(0);
274 }
275 
/*
   Destroys the MPIAdj data structure.  The i/j arrays are freed only when the matrix
   owns them (freeaij); freeaijwithfree distinguishes arrays that were allocated with
   plain malloc (e.g. by an external partitioning package) from PETSc-allocated ones.
*/
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays came from plain malloc(), so release with free();
         NOTE(review): a->values is not freed on this path — presumably never set
         when the arrays come from an external package; confirm */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,NULL);CHKERRQ(ierr);
  /* remove composed methods so stale pointers cannot be called after destruction */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
303 
304 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
305 {
306   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   switch (op) {
311   case MAT_SYMMETRIC:
312   case MAT_STRUCTURALLY_SYMMETRIC:
313   case MAT_HERMITIAN:
314     a->symmetric = flg;
315     break;
316   case MAT_SYMMETRY_ETERNAL:
317     break;
318   default:
319     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
320     break;
321   }
322   PetscFunctionReturn(0);
323 }
324 
/*
   Returns the column indices of a (globally numbered) row, and, if requested, its
   values.  idx aliases internal storage (a->j); values are staged through the
   a->rowvalues scratch buffer, which grows geometrically and persists until destroy.
   Rows without entries return NULL pointers.
*/
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* convert the global row number to a local one; only locally owned rows are valid */
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    /* grow the scratch buffer at least geometrically to amortize reallocation */
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    /* an adjacency matrix without explicit weights reports every entry as 1.0 */
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}
349 
/*
   Nothing to release: MatGetRow_MPIAdj hands out pointers into internal storage
   (a->j and the persistent a->rowvalues buffer), so restore is a no-op.
*/
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
355 
356 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
357 {
358   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
359   PetscErrorCode ierr;
360   PetscBool      flag;
361 
362   PetscFunctionBegin;
363   /* If the  matrix dimensions are not equal,or no of nonzeros */
364   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
365     flag = PETSC_FALSE;
366   }
367 
368   /* if the a->i are the same */
369   ierr = PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);CHKERRQ(ierr);
370 
371   /* if a->j are the same */
372   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
373 
374   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
375   PetscFunctionReturn(0);
376 }
377 
/*
   Exposes the internal CSR arrays (a->i, a->j) without copying.  With a nonzero
   oshift the arrays are shifted IN PLACE (e.g. to 1-based indexing for Fortran
   packages); MatRestoreRowIJ_MPIAdj must be called to undo the shift.
   The symmetric and blockcompressed arguments are ignored.
*/
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* ja must be shifted before ia: the loop bound (*ia)[*m] reads the unshifted offset */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
397 
/*
   Undoes MatGetRowIJ_MPIAdj: validates that the caller hands back the same internal
   arrays, then reverses the in-place oshift applied at get time.
*/
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inia[] argument");
    if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift then you must passed in inja[] argument");
    /* ia must be un-shifted before ja: the ja loop bound needs the restored (*ia)[*m] */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
417 
418 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
419 {
420   Mat               B;
421   PetscErrorCode    ierr;
422   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
423   const PetscInt    *rj;
424   const PetscScalar *ra;
425   MPI_Comm          comm;
426 
427   PetscFunctionBegin;
428   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
429   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
430   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
431 
432   /* count the number of nonzeros per row */
433   for (i=0; i<m; i++) {
434     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
435     for (j=0; j<len; j++) {
436       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
437     }
438     nzeros += len;
439     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
440   }
441 
442   /* malloc space for nonzeros */
443   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
444   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
445   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
446 
447   nzeros = 0;
448   ia[0]  = 0;
449   for (i=0; i<m; i++) {
450     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
451     cnt  = 0;
452     for (j=0; j<len; j++) {
453       if (rj[j] != i+rstart) { /* if not diagonal */
454         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
455         ja[nzeros+cnt++] = rj[j];
456       }
457     }
458     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
459     nzeros += cnt;
460     ia[i+1] = nzeros;
461   }
462 
463   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
464   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
465   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
466   ierr = MatSetType(B,type);CHKERRQ(ierr);
467   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
468 
469   if (reuse == MAT_INPLACE_MATRIX) {
470     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
471   } else {
472     *newmat = B;
473   }
474   PetscFunctionReturn(0);
475 }
476 
477 /* -------------------------------------------------------------------*/
/* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ: entries are positional (slot numbers in the
   comments), NULL marks an unimplemented operation.  Do not reorder. */
static struct _MatOps MatOps_Values = {NULL,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ NULL,
                                       NULL,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL
};
621 
/*
   Installs caller-provided CSR arrays (i, j, optional values) into the MPIAdj matrix.
   Ownership of the arrays transfers to the matrix (freeaij = PETSC_TRUE), so the
   caller must not free them.  In debug builds the arrays are sanity-checked first.
*/
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make everybody knows if they are using edge weights or not */
  /* NOTE(review): casting PetscBool* to int* relies on PetscBool being int-sized — confirm */
  ierr = MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));CHKERRMPI(ierr);

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    /* i[] must start at zero and be nondecreasing; j[] must hold valid global columns.
       NOTE(review): the monotonicity loop stops at n-1, so i[n] >= i[n-1] is unchecked — confirm */
    if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
    }
  }
  B->preallocated = PETSC_TRUE;

  /* matrix takes ownership of the caller's arrays */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
661 
662 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
663 {
664   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
665   PetscErrorCode ierr;
666   const PetscInt *ranges;
667   MPI_Comm       acomm,bcomm;
668   MPI_Group      agroup,bgroup;
669   PetscMPIInt    i,rank,size,nranks,*ranks;
670 
671   PetscFunctionBegin;
672   *B    = NULL;
673   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
674   ierr  = MPI_Comm_size(acomm,&size);CHKERRMPI(ierr);
675   ierr  = MPI_Comm_size(acomm,&rank);CHKERRMPI(ierr);
676   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
677   for (i=0,nranks=0; i<size; i++) {
678     if (ranges[i+1] - ranges[i] > 0) nranks++;
679   }
680   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
681     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
682     *B   = A;
683     PetscFunctionReturn(0);
684   }
685 
686   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
687   for (i=0,nranks=0; i<size; i++) {
688     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
689   }
690   ierr = MPI_Comm_group(acomm,&agroup);CHKERRMPI(ierr);
691   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRMPI(ierr);
692   ierr = PetscFree(ranks);CHKERRQ(ierr);
693   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRMPI(ierr);
694   ierr = MPI_Group_free(&agroup);CHKERRMPI(ierr);
695   ierr = MPI_Group_free(&bgroup);CHKERRMPI(ierr);
696   if (bcomm != MPI_COMM_NULL) {
697     PetscInt   m,N;
698     Mat_MPIAdj *b;
699     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
700     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
701     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
702     b          = (Mat_MPIAdj*)(*B)->data;
703     b->freeaij = PETSC_FALSE;
704     ierr       = MPI_Comm_free(&bcomm);CHKERRMPI(ierr);
705   }
706   PetscFunctionReturn(0);
707 }
708 
709 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
710 {
711   PetscErrorCode ierr;
712   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
713   PetscInt       *Values = NULL;
714   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
715   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
716 
717   PetscFunctionBegin;
718   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRMPI(ierr);
719   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
720   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
721   nz   = adj->nz;
722   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct i[m] %d",nz,adj->i[m]);
723   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
724 
725   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
726   ierr = PetscMalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
727   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
728   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
729   if (adj->values) {
730     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
731     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
732   }
733   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
734   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
735   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
736   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
737   nzstart -= nz;
738   /* shift the i[] values so they will be correct after being received */
739   for (i=0; i<m; i++) adj->i[i] += nzstart;
740   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
741   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
742   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
743   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
744   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
745   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
746   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
747   II[M] = NZ;
748   /* shift the i[] values back */
749   for (i=0; i<m; i++) adj->i[i] -= nzstart;
750   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
751   PetscFunctionReturn(0);
752 }
753 
754 /*@
755    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
756 
757    Collective
758 
   Input Parameter:
.  A - original MPIAdj matrix

   Output Parameter:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
764 
765    Level: developer
766 
767    Note:
768    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
769 
770    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
771 
772 .seealso: MatCreateMPIAdj()
773 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A (see MatCreate_MPIAdj());
     PetscUseMethod errors if the method is not composed, so this only works for MATMPIADJ */
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
783 
784 /*MC
785    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
786    intended for use constructing orderings and partitionings.
787 
788   Level: beginner
789 
.seealso: MatCreateMPIAdj()
791 M*/
792 
793 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
794 {
795   Mat_MPIAdj     *b;
796   PetscErrorCode ierr;
797 
798   PetscFunctionBegin;
799   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
800   B->data      = (void*)b;
801   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
802   B->assembled = PETSC_FALSE;
803 
804   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
805   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
806   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
807   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
808   PetscFunctionReturn(0);
809 }
810 
811 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj on each process (needed by sequential preconditioners)
813 
814    Logically Collective
815 
816    Input Parameter:
817 .  A - the matrix
818 
819    Output Parameter:
820 .  B - the same matrix on all processes
821 
822    Level: intermediate
823 
824 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
825 @*/
826 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
827 {
828   PetscErrorCode ierr;
829 
830   PetscFunctionBegin;
831   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
832   PetscFunctionReturn(0);
833 }
834 
835 /*@C
836    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
837 
838    Logically Collective
839 
840    Input Parameters:
+  B - the matrix
842 .  i - the indices into j for the start of each row
843 .  j - the column indices for each row (sorted for each row).
844        The indices in i and j start with zero (NOT with one).
845 -  values - [optional] edge weights
846 
847    Level: intermediate
848 
849 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
850 @*/
851 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
852 {
853   PetscErrorCode ierr;
854 
855   PetscFunctionBegin;
856   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
857   PetscFunctionReturn(0);
858 }
859 
860 /*@C
861    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
862    The matrix does not have numerical values associated with it, but is
863    intended for ordering (to reduce bandwidth etc) and partitioning.
864 
865    Collective
866 
867    Input Parameters:
868 +  comm - MPI communicator
869 .  m - number of local rows
870 .  N - number of global columns
871 .  i - the indices into j for the start of each row
872 .  j - the column indices for each row (sorted for each row).
873        The indices in i and j start with zero (NOT with one).
874 -  values -[optional] edge weights
875 
876    Output Parameter:
877 .  A - the matrix
878 
879    Level: intermediate
880 
   Notes:
    This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.
888 
889    If you already have a matrix, you can create its adjacency matrix by a call
890    to MatConvert, specifying a type of MATMPIADJ.
891 
892    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
893 
894 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
895 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Standard create/set-type sequence. The type must be set before preallocation so
     that MatMPIAdjSetPreallocation_MPIAdj() has been composed on the object
     (see MatCreate_MPIAdj()). */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* the matrix takes ownership of i, j, values; the caller must not free them (see manpage) */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
907