xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision f0fc11cebb1bb284829732915f9e84cabc170c2f)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * This routine is shared by MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  * */
11 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
12 {
13   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
14   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
15   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
16   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
17   PetscMPIInt        owner;
18   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
19   PetscLayout        rmap;
20   MPI_Comm           comm;
21   PetscSF            sf;
22   PetscSFNode       *iremote;
23   PetscBool          done;
24   PetscErrorCode     ierr;
25 
26   PetscFunctionBegin;
27   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
28   ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
29   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
30   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
31   ierr = PetscMalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
32   /* Construct the SF graph: each selected row is a leaf pointing at the rank and local index that own it */
33   nleaves = nlrows_is;
34   for (i=0; i<nlrows_is; i++){
35     owner = -1;
36     rlocalindex = -1;
37     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
38     iremote[i].rank  = owner;
39     iremote[i].index = rlocalindex;
40   }
41   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
42   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
43   nroots = nlrows_mat;
44   for (i=0; i<nlrows_mat; i++){
45     ncols_send[i] = xadj[i+1]-xadj[i];
46   }
47   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
48   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
49   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
50   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
51   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
52   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
53   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
54   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
55   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
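  /* From the first SF round each selected row (leaf) now knows how many columns its remote row has
     (ncols_recv) and where that row starts in the owner's adjncy[] (xadj_recv); build offsets and a
     second SF whose leaves are the individual column entries to be fetched */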
56   Ncols_recv =0;
57   for (i=0; i<nlrows_is; i++){
58     Ncols_recv             += ncols_recv[i];
59     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
60   }
61   Ncols_send = 0;
62   for(i=0; i<nlrows_mat; i++){
63     Ncols_send += ncols_send[i];
64   }
65   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
66   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
67   nleaves = Ncols_recv;
68   Ncols_recv = 0;
69   for (i=0; i<nlrows_is; i++){
70     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
71     for (j=0; j<ncols_recv[i]; j++){
72       iremote[Ncols_recv].rank    = owner;
73       iremote[Ncols_recv++].index = xadj_recv[i]+j;
74     }
75   }
76   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
77   /* allocate a receive buffer for the edge weights, if they are in use */
78   if (a->useedgeweights){ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
79   nroots = Ncols_send;
80   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
81   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
82   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
83   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
84   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
85   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
86   if (a->useedgeweights){
87     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
88     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
89   }
90   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
91   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
92   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
93   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
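  /* Keep only the received columns that also appear in icols; the others are marked with -1 here and
     skipped when the output CSR arrays are compressed below */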
94   rnclos = 0;
95   for (i=0; i<nlrows_is; i++){
96     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
97       ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
98       if (loc<0){
99         adjncy_recv[j] = -1;
100         if (a->useedgeweights) values_recv[j] = -1;
101         ncols_recv[i]--;
102       } else {
103         rnclos++;
104       }
105     }
106   }
107   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
108   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
109   if (a->useedgeweights) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
110   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
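  /* compress the filtered adjacency (and weights) into the output CSR arrays */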
111   rnclos = 0;
112   for(i=0; i<nlrows_is; i++){
113     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
114       if (adjncy_recv[j]<0) continue;
115       sadjncy[rnclos] = adjncy_recv[j];
116       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
117       rnclos++;
118     }
119   }
120   for(i=0; i<nlrows_is; i++){
121     sxadj[i+1] = sxadj[i]+ncols_recv[i];
122   }
123   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
124   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
125   if (sadj_values){
126     if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=0;
127   } else {
128     if (a->useedgeweights) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
129   }
130   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
131   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
132   if (a->useedgeweights) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
133   PetscFunctionReturn(0);
134 }
135 
136 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
137 {
138   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
139   PetscInt          *indices,nindx,j,k,loc;
140   PetscMPIInt        issame;
141   const PetscInt    *irow_indices,*icol_indices;
142   MPI_Comm           scomm_row,scomm_col,scomm_mat;
143   PetscErrorCode     ierr;
144 
145   PetscFunctionBegin;
146   nindx = 0;
147   /*
148    * Estimate an upper bound on the index scratch space needed below
149    */
150   for(i=0; i<n; i++){
151     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
152     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
153     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
154   }
155   ierr = PetscMalloc1(nindx,&indices);CHKERRQ(ierr);
156   /* construct a submat */
157   for (i=0; i<n; i++){
158     if (subcomm){
159       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
160       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
161       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
162       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
163       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
164       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as the comm when extracting a parallel submatrix");
165     } else {
166       scomm_row = PETSC_COMM_SELF;
167     }
168     /* get sub-matrix data */
169     sxadj = NULL; sadjncy = NULL; svalues = NULL;
170     ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
171     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
172     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
173     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
174     ierr = PetscArraycpy(indices,irow_indices,irow_n);CHKERRQ(ierr);
175     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
176     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
177     ierr = PetscArraycpy(indices+irow_n,icol_indices,icol_n);CHKERRQ(ierr);
178     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
179     nindx = irow_n+icol_n;
180     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
181     /* renumber columns */
182     for (j=0; j<irow_n; j++){
183       for (k=sxadj[j]; k<sxadj[j+1]; k++){
184         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
185 #if defined(PETSC_USE_DEBUG)
186         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
187 #endif
188         sadjncy[k] = loc;
189       }
190     }
191     if (scall==MAT_INITIAL_MATRIX){
192       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
193     } else {
194        Mat                sadj = *(submat[i]);
195        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
196        ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
197        ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
198        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set");
199        ierr = PetscArraycpy(sa->i,sxadj,irow_n+1);CHKERRQ(ierr);
200        ierr = PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);CHKERRQ(ierr);
201        if (svalues){ierr = PetscArraycpy(sa->values,svalues,sxadj[irow_n]);CHKERRQ(ierr);}
202        ierr = PetscFree(sxadj);CHKERRQ(ierr);
203        ierr = PetscFree(sadjncy);CHKERRQ(ierr);
204        if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
205     }
206   }
207   ierr = PetscFree(indices);CHKERRQ(ierr);
208   PetscFunctionReturn(0);
209 }
210 
211 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
212 {
213   PetscErrorCode     ierr;
214   /*get sub-matrices across a sub communicator */
215   PetscFunctionBegin;
216   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
221 {
222   PetscErrorCode     ierr;
223 
224   PetscFunctionBegin;
225   /*get sub-matrices based on PETSC_COMM_SELF */
226   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
231 {
232   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
233   PetscErrorCode    ierr;
234   PetscInt          i,j,m = A->rmap->n;
235   const char        *name;
236   PetscViewerFormat format;
237 
238   PetscFunctionBegin;
239   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
240   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
241   if (format == PETSC_VIEWER_ASCII_INFO) {
242     PetscFunctionReturn(0);
243   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
244   else {
245     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
246     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
247     for (i=0; i<m; i++) {
248       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
249       for (j=a->i[i]; j<a->i[i+1]; j++) {
250         if (a->values) {
251           ierr = PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j], a->values[j]);CHKERRQ(ierr);
252         } else {
253           ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
254         }
255       }
256       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
257     }
258     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
259     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
260     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
266 {
267   PetscErrorCode ierr;
268   PetscBool      iascii;
269 
270   PetscFunctionBegin;
271   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
272   if (iascii) {
273     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
274   }
275   PetscFunctionReturn(0);
276 }
277 
278 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
279 {
280   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284 #if defined(PETSC_USE_LOG)
285   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
286 #endif
287   ierr = PetscFree(a->diag);CHKERRQ(ierr);
288   if (a->freeaij) {
289     if (a->freeaijwithfree) {
290       if (a->i) free(a->i);
291       if (a->j) free(a->j);
292     } else {
293       ierr = PetscFree(a->i);CHKERRQ(ierr);
294       ierr = PetscFree(a->j);CHKERRQ(ierr);
295       ierr = PetscFree(a->values);CHKERRQ(ierr);
296     }
297   }
298   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
299   ierr = PetscFree(mat->data);CHKERRQ(ierr);
300   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
301   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
302   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
303   PetscFunctionReturn(0);
304 }
305 
306 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
307 {
308   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
309   PetscErrorCode ierr;
310 
311   PetscFunctionBegin;
312   switch (op) {
313   case MAT_SYMMETRIC:
314   case MAT_STRUCTURALLY_SYMMETRIC:
315   case MAT_HERMITIAN:
316     a->symmetric = flg;
317     break;
318   case MAT_SYMMETRY_ETERNAL:
319     break;
320   default:
321     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
322     break;
323   }
324   PetscFunctionReturn(0);
325 }
326 
327 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
328 {
329   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
330   PetscErrorCode ierr;
331 
332   PetscFunctionBegin;
333   row -= A->rmap->rstart;
334   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
335   *nz = a->i[row+1] - a->i[row];
336   if (v) {
337     PetscInt j;
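    /* rowvalues is a lazily grown scratch buffer, reused across MatGetRow() calls and freed in MatDestroy() */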
338     if (a->rowvalues_alloc < *nz) {
339       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
340       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
341       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
342     }
343     for (j=0; j<*nz; j++){
344       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
345     }
346     *v = (*nz) ? a->rowvalues : NULL;
347   }
348   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
349   PetscFunctionReturn(0);
350 }
351 
352 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
353 {
354   PetscFunctionBegin;
355   PetscFunctionReturn(0);
356 }
357 
358 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
359 {
360   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
361   PetscErrorCode ierr;
362   PetscBool      flag;
363 
364   PetscFunctionBegin;
365   /* The matrices cannot be equal if their dimensions or numbers of nonzeros differ */
366   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
367     flag = PETSC_FALSE;
368   } else {
369     /* otherwise compare the row pointers, then the column indices */
370     ierr = PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);CHKERRQ(ierr);
371     if (flag) {
372       ierr = PetscArraycmp(a->j,b->j,a->nz,&flag);CHKERRQ(ierr);
373     }
374   }
375 
376   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
377   PetscFunctionReturn(0);
378 }
379 
380 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
381 {
382   PetscInt   i;
383   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
384   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
385 
386   PetscFunctionBegin;
387   *m    = A->rmap->n;
388   *ia   = a->i;
389   *ja   = a->j;
390   *done = PETSC_TRUE;
391   if (oshift) {
392     for (i=0; i<(*ia)[*m]; i++) {
393       (*ja)[i]++;
394     }
395     for (i=0; i<=(*m); i++) (*ia)[i]++;
396   }
397   PetscFunctionReturn(0);
398 }
399 
400 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
401 {
402   PetscInt   i;
403   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
404   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
405 
406   PetscFunctionBegin;
407   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
408   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
409   if (oshift) {
410     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must also pass in the inia[] argument");
411     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must also pass in the inja[] argument");
412     for (i=0; i<=(*m); i++) (*ia)[i]--;
413     for (i=0; i<(*ia)[*m]; i++) {
414       (*ja)[i]--;
415     }
416   }
417   PetscFunctionReturn(0);
418 }
419 
420 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
421 {
422   Mat               B;
423   PetscErrorCode    ierr;
424   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
425   const PetscInt    *rj;
426   const PetscScalar *ra;
427   MPI_Comm          comm;
428 
429   PetscFunctionBegin;
430   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
431   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
432   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
433 
434   /* count the number of nonzeros per row */
435   for (i=0; i<m; i++) {
436     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
437     for (j=0; j<len; j++) {
438       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
439     }
440     nzeros += len;
441     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
442   }
443 
444   /* malloc space for nonzeros */
445   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
446   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
447   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
448 
449   nzeros = 0;
450   ia[0]  = 0;
451   for (i=0; i<m; i++) {
452     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
453     cnt  = 0;
454     for (j=0; j<len; j++) {
455       if (rj[j] != i+rstart) { /* if not diagonal */
456         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
457         ja[nzeros+cnt++] = rj[j];
458       }
459     }
460     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
461     nzeros += cnt;
462     ia[i+1] = nzeros;
463   }
464 
465   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
466   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
467   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
468   ierr = MatSetType(B,type);CHKERRQ(ierr);
469   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
470 
471   if (reuse == MAT_INPLACE_MATRIX) {
472     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
473   } else {
474     *newmat = B;
475   }
476   PetscFunctionReturn(0);
477 }
478 
479 /* -------------------------------------------------------------------*/
480 static struct _MatOps MatOps_Values = {0,
481                                        MatGetRow_MPIAdj,
482                                        MatRestoreRow_MPIAdj,
483                                        0,
484                                 /* 4*/ 0,
485                                        0,
486                                        0,
487                                        0,
488                                        0,
489                                        0,
490                                 /*10*/ 0,
491                                        0,
492                                        0,
493                                        0,
494                                        0,
495                                 /*15*/ 0,
496                                        MatEqual_MPIAdj,
497                                        0,
498                                        0,
499                                        0,
500                                 /*20*/ 0,
501                                        0,
502                                        MatSetOption_MPIAdj,
503                                        0,
504                                 /*24*/ 0,
505                                        0,
506                                        0,
507                                        0,
508                                        0,
509                                 /*29*/ 0,
510                                        0,
511                                        0,
512                                        0,
513                                        0,
514                                 /*34*/ 0,
515                                        0,
516                                        0,
517                                        0,
518                                        0,
519                                 /*39*/ 0,
520                                        MatCreateSubMatrices_MPIAdj,
521                                        0,
522                                        0,
523                                        0,
524                                 /*44*/ 0,
525                                        0,
526                                        MatShift_Basic,
527                                        0,
528                                        0,
529                                 /*49*/ 0,
530                                        MatGetRowIJ_MPIAdj,
531                                        MatRestoreRowIJ_MPIAdj,
532                                        0,
533                                        0,
534                                 /*54*/ 0,
535                                        0,
536                                        0,
537                                        0,
538                                        0,
539                                 /*59*/ 0,
540                                        MatDestroy_MPIAdj,
541                                        MatView_MPIAdj,
542                                        MatConvertFrom_MPIAdj,
543                                        0,
544                                 /*64*/ 0,
545                                        0,
546                                        0,
547                                        0,
548                                        0,
549                                 /*69*/ 0,
550                                        0,
551                                        0,
552                                        0,
553                                        0,
554                                 /*74*/ 0,
555                                        0,
556                                        0,
557                                        0,
558                                        0,
559                                 /*79*/ 0,
560                                        0,
561                                        0,
562                                        0,
563                                        0,
564                                 /*84*/ 0,
565                                        0,
566                                        0,
567                                        0,
568                                        0,
569                                 /*89*/ 0,
570                                        0,
571                                        0,
572                                        0,
573                                        0,
574                                 /*94*/ 0,
575                                        0,
576                                        0,
577                                        0,
578                                        0,
579                                 /*99*/ 0,
580                                        0,
581                                        0,
582                                        0,
583                                        0,
584                                /*104*/ 0,
585                                        0,
586                                        0,
587                                        0,
588                                        0,
589                                /*109*/ 0,
590                                        0,
591                                        0,
592                                        0,
593                                        0,
594                                /*114*/ 0,
595                                        0,
596                                        0,
597                                        0,
598                                        0,
599                                /*119*/ 0,
600                                        0,
601                                        0,
602                                        0,
603                                        0,
604                                /*124*/ 0,
605                                        0,
606                                        0,
607                                        0,
608                                        MatCreateSubMatricesMPI_MPIAdj,
609                                /*129*/ 0,
610                                        0,
611                                        0,
612                                        0,
613                                        0,
614                                /*134*/ 0,
615                                        0,
616                                        0,
617                                        0,
618                                        0,
619                                /*139*/ 0,
620                                        0,
621                                        0
622 };
623 
624 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
625 {
626   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
627   PetscBool       useedgeweights;
628   PetscErrorCode ierr;
629 #if defined(PETSC_USE_DEBUG)
630   PetscInt ii;
631 #endif
632 
633   PetscFunctionBegin;
634   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
635   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
636   if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
637   /* Make sure every process agrees on whether edge weights are being used */
638   ierr = MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));CHKERRQ(ierr);
639 
640 #if defined(PETSC_USE_DEBUG)
641   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
642   for (ii=1; ii<B->rmap->n; ii++) {
643     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
644   }
645   for (ii=0; ii<i[B->rmap->n]; ii++) {
646     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
647   }
648 #endif
649   B->preallocated = PETSC_TRUE;
650 
651   b->j      = j;
652   b->i      = i;
653   b->values = values;
654 
655   b->nz        = i[B->rmap->n];
656   b->diag      = 0;
657   b->symmetric = PETSC_FALSE;
658   b->freeaij   = PETSC_TRUE;
659 
660   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
661   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
662   PetscFunctionReturn(0);
663 }
664 
665 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
666 {
667   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
668   PetscErrorCode ierr;
669   const PetscInt *ranges;
670   MPI_Comm       acomm,bcomm;
671   MPI_Group      agroup,bgroup;
672   PetscMPIInt    i,rank,size,nranks,*ranks;
673 
674   PetscFunctionBegin;
675   *B    = NULL;
676   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
677   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
678   ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
679   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
680   for (i=0,nranks=0; i<size; i++) {
681     if (ranges[i+1] - ranges[i] > 0) nranks++;
682   }
683   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
684     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
685     *B   = A;
686     PetscFunctionReturn(0);
687   }
688 
689   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
690   for (i=0,nranks=0; i<size; i++) {
691     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
692   }
693   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
694   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
695   ierr = PetscFree(ranks);CHKERRQ(ierr);
696   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
697   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
698   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
699   if (bcomm != MPI_COMM_NULL) {
700     PetscInt   m,N;
701     Mat_MPIAdj *b;
702     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
703     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
704     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
705     b          = (Mat_MPIAdj*)(*B)->data;
706     b->freeaij = PETSC_FALSE;
707     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
708   }
709   PetscFunctionReturn(0);
710 }
711 
712 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
713 {
714   PetscErrorCode ierr;
715   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
716   PetscInt       *Values = NULL;
717   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
718   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
719 
720   PetscFunctionBegin;
721   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
722   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
723   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
724   nz   = adj->nz;
725   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D does not match adj->i[m] %D",nz,adj->i[m]);
726   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
727 
728   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
729   ierr = PetscMalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
730   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
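  /* dispnz[r] is the offset at which rank r's column indices (and weights) land in the gathered arrays */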
731   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
732   if (adj->values) {
733     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
734     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
735   }
736   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
737   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
738   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
739   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
740   nzstart -= nz;
741   /* shift the i[] values so they will be correct after being received */
742   for (i=0; i<m; i++) adj->i[i] += nzstart;
743   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
744   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
745   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
746   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
747   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
748   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
749   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
750   II[M] = NZ;
751   /* shift the i[] values back */
752   for (i=0; i<m; i++) adj->i[i] -= nzstart;
753   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
757 /*@
758    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
759 
760    Collective
761 
762    Input Parameter:
763 .  A - original MPIAdj matrix
764 
765    Output Parameter:
766 .  B - matrix on subcommunicator, NULL on ranks that own zero rows of A
767 
768    Level: developer
769 
770    Note:
771    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
772 
773    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
774 
775 .seealso: MatCreateMPIAdj()
776 @*/
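/*
   A minimal usage sketch (not from the PETSc test suite; A is assumed to be an existing MATMPIADJ
   matrix in which some ranks may own zero rows):

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {
       ... hand B to a partitioner running on the smaller communicator ...
       ierr = MatDestroy(&B);CHKERRQ(ierr);       destroy B before A, since the arrays are shared
     }
*/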
777 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
778 {
779   PetscErrorCode ierr;
780 
781   PetscFunctionBegin;
782   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
783   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
784   PetscFunctionReturn(0);
785 }
786 
787 /*MC
788    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
789    intended for use constructing orderings and partitionings.
790 
791   Level: beginner
792 
793 .seealso: MatCreateMPIAdj
794 M*/
795 
796 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
797 {
798   Mat_MPIAdj     *b;
799   PetscErrorCode ierr;
800 
801   PetscFunctionBegin;
802   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
803   B->data      = (void*)b;
804   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
805   B->assembled = PETSC_FALSE;
806 
807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
808   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
809   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
810   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
811   PetscFunctionReturn(0);
812 }
813 
814 /*@C
815    MatMPIAdjToSeq - Converts a parallel MPIAdj matrix into a complete MPIAdj matrix on each process (needed by sequential preconditioners)
816 
817    Logically Collective
818 
819    Input Parameter:
820 .  A - the matrix
821 
822    Output Parameter:
823 .  B - the same matrix on all processes
824 
825    Level: intermediate
826 
827 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
828 @*/
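/*
   A minimal usage sketch (hypothetical; A is assumed to be a parallel MATMPIADJ matrix):

     Mat Aseq;
     ierr = MatMPIAdjToSeq(A,&Aseq);CHKERRQ(ierr);
     ... every process now holds the complete adjacency graph in Aseq ...
     ierr = MatDestroy(&Aseq);CHKERRQ(ierr);
*/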
829 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
830 {
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 /*@C
839    MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix structure and optional edge weights
840 
841    Logically Collective
842 
843    Input Parameters:
844 +  B - the matrix
845 .  i - the indices into j for the start of each row
846 .  j - the column indices for each row (sorted for each row).
847        The indices in i and j start with zero (NOT with one).
848 -  values - [optional] edge weights
849 
850    Level: intermediate
851 
852 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
853 @*/
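/*
   A minimal usage sketch of the MatCreate()/MatSetType() construction path (hypothetical names:
   nlocal and N are the local row count and global column count, and ia, ja are PetscMalloc1()'d
   CSR arrays whose ownership passes to the matrix):

     Mat B;
     ierr = MatCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,nlocal,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(B,ia,ja,NULL);CHKERRQ(ierr);
*/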
854 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
855 {
856   PetscErrorCode ierr;
857 
858   PetscFunctionBegin;
859   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 /*@C
864    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
865    The matrix does not have numerical values associated with it, but is
866    intended for ordering (to reduce bandwidth etc) and partitioning.
867 
868    Collective
869 
870    Input Parameters:
871 +  comm - MPI communicator
872 .  m - number of local rows
873 .  N - number of global columns
874 .  i - the indices into j for the start of each row
875 .  j - the column indices for each row (sorted for each row).
876        The indices in i and j start with zero (NOT with one).
877 -  values -[optional] edge weights
878 
879    Output Parameter:
880 .  A - the matrix
881 
882    Level: intermediate
883 
884    Notes:
885    This matrix object does not support most matrix operations, including
886    MatSetValues().
887    You must NOT free the i, j and values arrays yourself. PETSc will free them
888    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
889    call from Fortran you need not create the arrays with PetscMalloc().
890    The adjacency list should not include the matrix diagonal entries.
891 
892    If you already have a matrix, you can create its adjacency matrix by a call
893    to MatConvert, specifying a type of MATMPIADJ.
894 
895    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
896 
897 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
898 @*/
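/*
   A minimal usage sketch (not from the PETSc examples): on two MPI ranks, build the adjacency
   matrix of the 4-vertex cycle 0-1-2-3-0, each rank owning two rows. The neighbour lists happen
   to be identical on both ranks, and the arrays are obtained with PetscMalloc1() because
   MatDestroy() will free them.

     Mat      A;
     PetscInt *ia,*ja;

     ierr = PetscMalloc1(3,&ia);CHKERRQ(ierr);
     ierr = PetscMalloc1(4,&ja);CHKERRQ(ierr);
     ia[0] = 0; ia[1] = 2; ia[2] = 4;
     ja[0] = 1; ja[1] = 3;                        first local row
     ja[2] = 0; ja[3] = 2;                        second local row
     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&A);CHKERRQ(ierr);
*/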
899 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
900 {
901   PetscErrorCode ierr;
902 
903   PetscFunctionBegin;
904   ierr = MatCreate(comm,A);CHKERRQ(ierr);
905   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
906   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
907   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
908   PetscFunctionReturn(0);
909 }
910