xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision dec1416f15364d8a66cef6f4b2a5a2aba5192d13)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * The interface is designed to serve both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices).
10  */
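/*
 * A hedged illustration of the intended calling sequence (the variable names below exist only in
 * this sketch); the three output arrays come back in CSR-like form, and any output pointer may be
 * NULL when the caller does not need that array:
 *
 *   PetscInt *sxadj = NULL,*sadjncy = NULL,*svalues = NULL;
 *   ierr = MatCreateSubMatrix_MPIAdj_data(adj,irows,icols,&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
 *   ierr = MatCreateMPIAdj(scomm,nlocalrows,ncols,sxadj,sadjncy,svalues,&submat);CHKERRQ(ierr);
 */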
11 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
12 {
13   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
14   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
15   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
16   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
17   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
18   PetscLayout        rmap;
19   MPI_Comm           comm;
20   PetscSF            sf;
21   PetscSFNode       *iremote;
22   PetscBool          done;
23   PetscErrorCode     ierr;
24 
25   PetscFunctionBegin;
26   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
27   ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
28   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
29   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
30   ierr = PetscMalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
31   /* construct the SF graph: one leaf per requested row, rooted at the process that owns that row */
32   nleaves = nlrows_is;
33   for (i=0; i<nlrows_is; i++){
34     owner = -1;
35     rlocalindex = -1;
36     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
37     iremote[i].rank  = owner;
38     iremote[i].index = rlocalindex;
39   }
40   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
41   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
42   nroots = nlrows_mat;
43   for (i=0; i<nlrows_mat; i++){
44     ncols_send[i] = xadj[i+1]-xadj[i];
45   }
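  /* Phase 1: an SF rooted at the owning processes delivers, for every requested row, that row's
     length (ncols) and its starting offset in the owner's adjncy array (xadj) */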
46   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
47   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
48   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
49   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
50   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
51   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
52   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
53   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
54   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
55   Ncols_recv =0;
56   for (i=0; i<nlrows_is; i++){
57     Ncols_recv             += ncols_recv[i];
58     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
59   }
60   Ncols_send = 0;
61   for(i=0; i<nlrows_mat; i++){
62     Ncols_send += ncols_send[i];
63   }
64   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
65   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
66   nleaves = Ncols_recv;
67   Ncols_recv = 0;
68   for (i=0; i<nlrows_is; i++){
69     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
70     for (j=0; j<ncols_recv[i]; j++){
71       iremote[Ncols_recv].rank    = owner;
72       iremote[Ncols_recv++].index = xadj_recv[i]+j;
73     }
74   }
75   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
76   /* allocate a receive buffer for edge weights if they are being used */
77   if (a->useedgeweights){ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
78   nroots = Ncols_send;
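  /* Phase 2: a second SF, now with one leaf per requested nonzero, pulls the actual column indices
     (and the edge weights, when present) from the owning processes */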
79   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
80   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
81   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
82   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
83   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
84   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
85   if (a->useedgeweights){
86     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
87     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
88   }
89   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
90   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
91   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
92   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
93   rnclos = 0;
94   for (i=0; i<nlrows_is; i++){
95     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
96       ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
97       if (loc<0){
98         adjncy_recv[j] = -1;
99         if (a->useedgeweights) values_recv[j] = -1;
100         ncols_recv[i]--;
101       } else {
102         rnclos++;
103       }
104     }
105   }
106   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
107   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
108   if (a->useedgeweights) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
109   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
110   rnclos = 0;
111   for(i=0; i<nlrows_is; i++){
112     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
113       if (adjncy_recv[j]<0) continue;
114       sadjncy[rnclos] = adjncy_recv[j];
115       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
116       rnclos++;
117     }
118   }
119   for(i=0; i<nlrows_is; i++){
120     sxadj[i+1] = sxadj[i]+ncols_recv[i];
121   }
122   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
123   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
124   if (sadj_values){
125     if (a->useedgeweights) *sadj_values = svalues; else *sadj_values = NULL;
126   } else {
127     if (a->useedgeweights) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
128   }
129   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
130   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
131   if (a->useedgeweights) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
132   PetscFunctionReturn(0);
133 }
134 
135 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
136 {
137   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
138   PetscInt          *indices,nindx,j,k,loc;
139   PetscMPIInt        issame;
140   const PetscInt    *irow_indices,*icol_indices;
141   MPI_Comm           scomm_row,scomm_col,scomm_mat;
142   PetscErrorCode     ierr;
143 
144   PetscFunctionBegin;
145   nindx = 0;
146   /*
147    * Estimate the maximum scratch-array size needed so memory can be allocated once, outside the loop
148    */
149   for(i=0; i<n; i++){
150     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
151     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
152     nindx = PetscMax(nindx,irow_n+icol_n);
153   }
154   ierr = PetscMalloc1(nindx,&indices);CHKERRQ(ierr);
155   /* construct a submat */
156   for (i=0; i<n; i++){
157     if (subcomm){
158       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
159       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
160       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
161       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
162       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
163       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
164     } else {
165       scomm_row = PETSC_COMM_SELF;
166     }
167     /* get sub-matrix data */
168     sxadj = NULL; sadjncy = NULL; svalues = NULL;
169     ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
170     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
171     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
172     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
173     ierr = PetscArraycpy(indices,irow_indices,irow_n);CHKERRQ(ierr);
174     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
175     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
176     ierr = PetscArraycpy(indices+irow_n,icol_indices,icol_n);CHKERRQ(ierr);
177     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
178     nindx = irow_n+icol_n;
179     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
180     /* renumber columns */
181     for (j=0; j<irow_n; j++){
182       for (k=sxadj[j]; k<sxadj[j+1]; k++){
183         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
184 #if defined(PETSC_USE_DEBUG)
185         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
186 #endif
187         sadjncy[k] = loc;
188       }
189     }
190     if (scall==MAT_INITIAL_MATRIX){
191       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
192     } else {
193        Mat                sadj = *(submat[i]);
194        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
195        ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
196        ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
197        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set\n");
198        ierr = PetscArraycpy(sa->i,sxadj,irow_n+1);CHKERRQ(ierr);
199        ierr = PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);CHKERRQ(ierr);
200        if (svalues){ierr = PetscArraycpy(sa->values,svalues,sxadj[irow_n]);CHKERRQ(ierr);}
201        ierr = PetscFree(sxadj);CHKERRQ(ierr);
202        ierr = PetscFree(sadjncy);CHKERRQ(ierr);
203        if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
204     }
205   }
206   ierr = PetscFree(indices);CHKERRQ(ierr);
207   PetscFunctionReturn(0);
208 }
209 
210 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
211 {
212   PetscErrorCode     ierr;
213   /*get sub-matrices across a sub communicator */
214   PetscFunctionBegin;
215   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
216   PetscFunctionReturn(0);
217 }
218 
219 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
220 {
221   PetscErrorCode     ierr;
222 
223   PetscFunctionBegin;
224   /*get sub-matrices based on PETSC_COMM_SELF */
225   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
226   PetscFunctionReturn(0);
227 }
228 
229 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
230 {
231   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
232   PetscErrorCode    ierr;
233   PetscInt          i,j,m = A->rmap->n;
234   const char        *name;
235   PetscViewerFormat format;
236 
237   PetscFunctionBegin;
238   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
239   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
240   if (format == PETSC_VIEWER_ASCII_INFO) {
241     PetscFunctionReturn(0);
242   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
243   else {
244     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
245     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
246     for (i=0; i<m; i++) {
247       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
248       for (j=a->i[i]; j<a->i[i+1]; j++) {
249         if (a->values) {
250           ierr = PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j], a->values[j]);CHKERRQ(ierr);
251         } else {
252           ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
253         }
254       }
255       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
256     }
257     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
258     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
259     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
260   }
261   PetscFunctionReturn(0);
262 }
263 
264 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
265 {
266   PetscErrorCode ierr;
267   PetscBool      iascii;
268 
269   PetscFunctionBegin;
270   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
271   if (iascii) {
272     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
273   }
274   PetscFunctionReturn(0);
275 }
276 
277 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
278 {
279   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
280   PetscErrorCode ierr;
281 
282   PetscFunctionBegin;
283 #if defined(PETSC_USE_LOG)
284   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
285 #endif
286   ierr = PetscFree(a->diag);CHKERRQ(ierr);
287   if (a->freeaij) {
288     if (a->freeaijwithfree) {
289       if (a->i) free(a->i);
290       if (a->j) free(a->j);
291     } else {
292       ierr = PetscFree(a->i);CHKERRQ(ierr);
293       ierr = PetscFree(a->j);CHKERRQ(ierr);
294       ierr = PetscFree(a->values);CHKERRQ(ierr);
295     }
296   }
297   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
298   ierr = PetscFree(mat->data);CHKERRQ(ierr);
299   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
300   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
301   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
302   PetscFunctionReturn(0);
303 }
304 
305 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
306 {
307   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   switch (op) {
312   case MAT_SYMMETRIC:
313   case MAT_STRUCTURALLY_SYMMETRIC:
314   case MAT_HERMITIAN:
315     a->symmetric = flg;
316     break;
317   case MAT_SYMMETRY_ETERNAL:
318     break;
319   default:
320     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
321     break;
322   }
323   PetscFunctionReturn(0);
324 }
325 
326 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
327 {
328   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
329   PetscErrorCode ierr;
330 
331   PetscFunctionBegin;
332   row -= A->rmap->rstart;
333   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
334   *nz = a->i[row+1] - a->i[row];
335   if (v) {
336     PetscInt j;
337     if (a->rowvalues_alloc < *nz) {
338       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
339       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
340       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
341     }
342     for (j=0; j<*nz; j++){
343       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
344     }
345     *v = (*nz) ? a->rowvalues : NULL;
346   }
347   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
348   PetscFunctionReturn(0);
349 }
350 
351 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
352 {
353   PetscFunctionBegin;
354   PetscFunctionReturn(0);
355 }
356 
357 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
358 {
359   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
360   PetscErrorCode ierr;
361   PetscBool      flag;
362 
363   PetscFunctionBegin;
364   /* If the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
365   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
366     flag = PETSC_FALSE;
367   } else {
368     /* otherwise compare the row offsets a->i and, only if those match, the column indices a->j */
369     ierr = PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);CHKERRQ(ierr);
370     if (flag) {
371       ierr = PetscArraycmp(a->j,b->j,a->nz,&flag);CHKERRQ(ierr);
372     }
373   }
374 
375   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
376   PetscFunctionReturn(0);
377 }
378 
379 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
380 {
381   PetscInt   i;
382   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
383   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
384 
385   PetscFunctionBegin;
386   *m    = A->rmap->n;
387   *ia   = a->i;
388   *ja   = a->j;
389   *done = PETSC_TRUE;
390   if (oshift) {
391     for (i=0; i<(*ia)[*m]; i++) {
392       (*ja)[i]++;
393     }
394     for (i=0; i<=(*m); i++) (*ia)[i]++;
395   }
396   PetscFunctionReturn(0);
397 }
398 
399 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
400 {
401   PetscInt   i;
402   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
403   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
404 
405   PetscFunctionBegin;
406   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
407   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
408   if (oshift) {
409     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inia[] argument");
410     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inja[] argument");
411     for (i=0; i<=(*m); i++) (*ia)[i]--;
412     for (i=0; i<(*ia)[*m]; i++) {
413       (*ja)[i]--;
414     }
415   }
416   PetscFunctionReturn(0);
417 }
418 
419 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
420 {
421   Mat               B;
422   PetscErrorCode    ierr;
423   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
424   const PetscInt    *rj;
425   const PetscScalar *ra;
426   MPI_Comm          comm;
427 
428   PetscFunctionBegin;
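  /* The conversion drops diagonal entries and stores PetscAbsScalar(value), cast to PetscInt,
     as the integer edge weight of each off-diagonal entry */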
429   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
430   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
431   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
432 
433   /* count the number of nonzeros per row */
434   for (i=0; i<m; i++) {
435     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
436     for (j=0; j<len; j++) {
437       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
438     }
439     nzeros += len;
440     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
441   }
442 
443   /* malloc space for nonzeros */
444   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
445   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
446   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
447 
448   nzeros = 0;
449   ia[0]  = 0;
450   for (i=0; i<m; i++) {
451     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
452     cnt  = 0;
453     for (j=0; j<len; j++) {
454       if (rj[j] != i+rstart) { /* if not diagonal */
455         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
456         ja[nzeros+cnt++] = rj[j];
457       }
458     }
459     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
460     nzeros += cnt;
461     ia[i+1] = nzeros;
462   }
463 
464   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
465   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
466   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
467   ierr = MatSetType(B,type);CHKERRQ(ierr);
468   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
469 
470   if (reuse == MAT_INPLACE_MATRIX) {
471     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
472   } else {
473     *newmat = B;
474   }
475   PetscFunctionReturn(0);
476 }
477 
478 /* -------------------------------------------------------------------*/
479 static struct _MatOps MatOps_Values = {0,
480                                        MatGetRow_MPIAdj,
481                                        MatRestoreRow_MPIAdj,
482                                        0,
483                                 /* 4*/ 0,
484                                        0,
485                                        0,
486                                        0,
487                                        0,
488                                        0,
489                                 /*10*/ 0,
490                                        0,
491                                        0,
492                                        0,
493                                        0,
494                                 /*15*/ 0,
495                                        MatEqual_MPIAdj,
496                                        0,
497                                        0,
498                                        0,
499                                 /*20*/ 0,
500                                        0,
501                                        MatSetOption_MPIAdj,
502                                        0,
503                                 /*24*/ 0,
504                                        0,
505                                        0,
506                                        0,
507                                        0,
508                                 /*29*/ 0,
509                                        0,
510                                        0,
511                                        0,
512                                        0,
513                                 /*34*/ 0,
514                                        0,
515                                        0,
516                                        0,
517                                        0,
518                                 /*39*/ 0,
519                                        MatCreateSubMatrices_MPIAdj,
520                                        0,
521                                        0,
522                                        0,
523                                 /*44*/ 0,
524                                        0,
525                                        MatShift_Basic,
526                                        0,
527                                        0,
528                                 /*49*/ 0,
529                                        MatGetRowIJ_MPIAdj,
530                                        MatRestoreRowIJ_MPIAdj,
531                                        0,
532                                        0,
533                                 /*54*/ 0,
534                                        0,
535                                        0,
536                                        0,
537                                        0,
538                                 /*59*/ 0,
539                                        MatDestroy_MPIAdj,
540                                        MatView_MPIAdj,
541                                        MatConvertFrom_MPIAdj,
542                                        0,
543                                 /*64*/ 0,
544                                        0,
545                                        0,
546                                        0,
547                                        0,
548                                 /*69*/ 0,
549                                        0,
550                                        0,
551                                        0,
552                                        0,
553                                 /*74*/ 0,
554                                        0,
555                                        0,
556                                        0,
557                                        0,
558                                 /*79*/ 0,
559                                        0,
560                                        0,
561                                        0,
562                                        0,
563                                 /*84*/ 0,
564                                        0,
565                                        0,
566                                        0,
567                                        0,
568                                 /*89*/ 0,
569                                        0,
570                                        0,
571                                        0,
572                                        0,
573                                 /*94*/ 0,
574                                        0,
575                                        0,
576                                        0,
577                                        0,
578                                 /*99*/ 0,
579                                        0,
580                                        0,
581                                        0,
582                                        0,
583                                /*104*/ 0,
584                                        0,
585                                        0,
586                                        0,
587                                        0,
588                                /*109*/ 0,
589                                        0,
590                                        0,
591                                        0,
592                                        0,
593                                /*114*/ 0,
594                                        0,
595                                        0,
596                                        0,
597                                        0,
598                                /*119*/ 0,
599                                        0,
600                                        0,
601                                        0,
602                                        0,
603                                /*124*/ 0,
604                                        0,
605                                        0,
606                                        0,
607                                        MatCreateSubMatricesMPI_MPIAdj,
608                                /*129*/ 0,
609                                        0,
610                                        0,
611                                        0,
612                                        0,
613                                /*134*/ 0,
614                                        0,
615                                        0,
616                                        0,
617                                        0,
618                                /*139*/ 0,
619                                        0,
620                                        0
621 };
622 
623 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
624 {
625   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
626   PetscBool       useedgeweights;
627   PetscErrorCode ierr;
628 #if defined(PETSC_USE_DEBUG)
629   PetscInt ii;
630 #endif
631 
632   PetscFunctionBegin;
633   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
634   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
635   if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
636   /* Make sure every process agrees on whether edge weights are in use */
637   ierr = MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));CHKERRQ(ierr);
638 
639 #if defined(PETSC_USE_DEBUG)
640   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
641   for (ii=1; ii<B->rmap->n; ii++) {
642     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
643   }
644   for (ii=0; ii<i[B->rmap->n]; ii++) {
645     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
646   }
647 #endif
648   B->preallocated = PETSC_TRUE;
649 
650   b->j      = j;
651   b->i      = i;
652   b->values = values;
653 
654   b->nz        = i[B->rmap->n];
655   b->diag      = 0;
656   b->symmetric = PETSC_FALSE;
657   b->freeaij   = PETSC_TRUE;
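  /* the matrix now owns the i, j, and values arrays; they are freed in MatDestroy_MPIAdj() */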
658 
659   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
660   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
661   PetscFunctionReturn(0);
662 }
663 
664 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
665 {
666   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
667   PetscErrorCode ierr;
668   const PetscInt *ranges;
669   MPI_Comm       acomm,bcomm;
670   MPI_Group      agroup,bgroup;
671   PetscMPIInt    i,rank,size,nranks,*ranks;
672 
673   PetscFunctionBegin;
674   *B    = NULL;
675   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
676   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
677   ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
678   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
679   for (i=0,nranks=0; i<size; i++) {
680     if (ranges[i+1] - ranges[i] > 0) nranks++;
681   }
682   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
683     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
684     *B   = A;
685     PetscFunctionReturn(0);
686   }
687 
688   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
689   for (i=0,nranks=0; i<size; i++) {
690     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
691   }
692   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
693   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
694   ierr = PetscFree(ranks);CHKERRQ(ierr);
695   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
696   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
697   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
698   if (bcomm != MPI_COMM_NULL) {
699     PetscInt   m,N;
700     Mat_MPIAdj *b;
701     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
702     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
703     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
704     b          = (Mat_MPIAdj*)(*B)->data;
705     b->freeaij = PETSC_FALSE;
706     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
707   }
708   PetscFunctionReturn(0);
709 }
710 
711 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
712 {
713   PetscErrorCode ierr;
714   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
715   PetscInt       *Values = NULL;
716   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
717   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
718 
719   PetscFunctionBegin;
720   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
721   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
722   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
723   nz   = adj->nz;
724   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct, i[m] %D",nz,adj->i[m]);
725   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
726 
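  /* Gather every process's j[] (and optional values[]) with MPI_Allgatherv, and gather the locally
     shifted i[] row offsets so that their concatenation forms a valid global row pointer */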
727   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
728   ierr = PetscMalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
729   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
730   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
731   if (adj->values) {
732     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
733     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
734   }
735   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
736   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
737   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
738   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
739   nzstart -= nz;
740   /* shift the i[] values so they will be correct after being received */
741   for (i=0; i<m; i++) adj->i[i] += nzstart;
742   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
743   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
744   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
745   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
746   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
747   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
748   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
749   II[M] = NZ;
750   /* shift the i[] values back */
751   for (i=0; i<m; i++) adj->i[i] -= nzstart;
752   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 /*@
757    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
758 
759    Collective
760 
761    Input Arguments:
762 .  A - original MPIAdj matrix
763 
764    Output Arguments:
765 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
766 
767    Level: developer
768 
769    Note:
770    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
771 
772    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
773 
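   Example Usage:
   A minimal sketch (error handling abbreviated); B is NULL on ranks that own no rows of A:
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {
     /* this rank owns rows of A and participates in the subcommunicator */
     ierr = MatDestroy(&B);CHKERRQ(ierr);   /* destroy B before A, since the arrays are shared */
   }
.ve
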
774 .seealso: MatCreateMPIAdj()
775 @*/
776 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
777 {
778   PetscErrorCode ierr;
779 
780   PetscFunctionBegin;
781   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
782   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
783   PetscFunctionReturn(0);
784 }
785 
786 /*MC
787    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
788    intended for use in constructing orderings and partitionings.
789 
790   Level: beginner
791 
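  Example Usage:
  A hedged sketch of one common pattern: creating the adjacency graph of an existing matrix A and
  handing it to an already-created MatPartitioning object part (both assumed to exist here):
.vb
  Mat adj;
  ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
  /* ... apply the partitioning, then destroy adj with MatDestroy() ... */
.ve
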
792 .seealso: MatCreateMPIAdj
793 M*/
794 
795 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
796 {
797   Mat_MPIAdj     *b;
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
802   B->data      = (void*)b;
803   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
804   B->assembled = PETSC_FALSE;
805 
806   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
808   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
809   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
810   PetscFunctionReturn(0);
811 }
812 
813 /*@C
814    MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)
815 
816    Logically Collective
817 
818    Input Parameter:
819 .  A - the matrix
820 
821    Output Parameter:
822 .  B - the same matrix on all processes
823 
824    Level: intermediate
825 
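   Example Usage:
   A minimal sketch; every process ends up with the full adjacency graph of A:
.vb
   Mat seqB;
   ierr = MatMPIAdjToSeq(A,&seqB);CHKERRQ(ierr);
   /* ... use seqB with a sequential algorithm ... */
   ierr = MatDestroy(&seqB);CHKERRQ(ierr);
.ve
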
826 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
827 @*/
828 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
829 {
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
834   PetscFunctionReturn(0);
835 }
836 
837 /*@C
838    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix adjacency structure and optional edge weights
839 
840    Logically Collective
841 
842    Input Parameters:
843 +  B - the matrix
844 .  i - the indices into j for the start of each row
845 .  j - the column indices for each row (sorted for each row).
846        The indices in i and j start with zero (NOT with one).
847 -  values - [optional] edge weights
848 
849    Level: intermediate
850 
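   Example Usage:
   A minimal sketch with illustrative numbers, assuming B has already been created with two local
   rows and type MATMPIADJ: local row 0 has neighbors {1,3} and local row 1 has neighbor {2}. The
   arrays must come from PetscMalloc() because the matrix takes ownership of them:
.vb
   PetscInt *i,*j;
   ierr = PetscMalloc1(3,&i);CHKERRQ(ierr);
   ierr = PetscMalloc1(3,&j);CHKERRQ(ierr);
   i[0] = 0; i[1] = 2; i[2] = 3;
   j[0] = 1; j[1] = 3; j[2] = 2;
   ierr = MatMPIAdjSetPreallocation(B,i,j,NULL);CHKERRQ(ierr);
.ve
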
851 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
852 @*/
853 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
854 {
855   PetscErrorCode ierr;
856 
857   PetscFunctionBegin;
858   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 /*@C
863    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
864    The matrix does not have numerical values associated with it, but is
865    intended for ordering (to reduce bandwidth etc) and partitioning.
866 
867    Collective
868 
869    Input Parameters:
870 +  comm - MPI communicator
871 .  m - number of local rows
872 .  N - number of global columns
873 .  i - the indices into j for the start of each row
874 .  j - the column indices for each row (sorted for each row).
875        The indices in i and j start with zero (NOT with one).
876 -  values -[optional] edge weights
877 
878    Output Parameter:
879 .  A - the matrix
880 
881    Level: intermediate
882 
883    Notes:
884    This matrix object does not support most matrix operations, including
885    MatSetValues().
886    You must NOT free the i, j, and values arrays yourself. PETSc will free them
887    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
888    call from Fortran you need not create the arrays with PetscMalloc().
889    The adjacency list should not include the matrix diagonal entries.
890 
891    If you already have a matrix, you can create its adjacency matrix by a call
892    to MatConvert, specifying a type of MATMPIADJ.
893 
894    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
895 
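   Example Usage:
   A minimal sketch (illustrative only, assuming at least three MPI ranks so the two neighbors are
   distinct and off-diagonal): each process contributes one row of a ring graph in which rank r is
   adjacent to ranks r-1 and r+1. The arrays are obtained from PetscMalloc() because the matrix
   takes ownership of them.
.vb
   PetscInt    *i,*j;
   PetscMPIInt rank,size;
   Mat         adj;
   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
   ierr = PetscMalloc1(2,&i);CHKERRQ(ierr);
   ierr = PetscMalloc1(2,&j);CHKERRQ(ierr);
   i[0] = 0; i[1] = 2;
   j[0] = (rank+size-1)%size; j[1] = (rank+1)%size;
   ierr = PetscSortInt(2,j);CHKERRQ(ierr);      /* column indices must be sorted within each row */
   ierr = MatCreateMPIAdj(comm,1,size,i,j,NULL,&adj);CHKERRQ(ierr);
.ve
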
896 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
897 @*/
898 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   ierr = MatCreate(comm,A);CHKERRQ(ierr);
904   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
905   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
906   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 