1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
 * Common helper for MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices):
 * it gathers the xadj/adjncy (and optional edge-weight) arrays of the requested sub-matrix for the calling process.
 */
11 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
12 {
13   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
14   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
15   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
16   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
17   PetscMPIInt        owner;
18   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
19   PetscLayout        rmap;
20   MPI_Comm           comm;
21   PetscSF            sf;
22   PetscSFNode       *iremote;
23   PetscBool          done;
24   PetscErrorCode     ierr;
25 
26   PetscFunctionBegin;
27   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
28   ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
29   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
30   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
31   ierr = PetscMalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct the SF graph: each requested row (leaf) points to the rank and local index that owns it */
33   nleaves = nlrows_is;
34   for (i=0; i<nlrows_is; i++) {
35     owner = -1;
36     rlocalindex = -1;
37     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
38     iremote[i].rank  = owner;
39     iremote[i].index = rlocalindex;
40   }
41   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
42   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
43   nroots = nlrows_mat;
44   for (i=0; i<nlrows_mat; i++) {
45     ncols_send[i] = xadj[i+1]-xadj[i];
46   }
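  /* First SF round: each requested row is a leaf pointing at its owner; broadcast the per-row column count and xadj offset from the owning ranks */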
47   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
48   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
49   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
50   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
51   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);CHKERRQ(ierr);
52   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);CHKERRQ(ierr);
53   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);CHKERRQ(ierr);
54   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);CHKERRQ(ierr);
55   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
56   Ncols_recv =0;
57   for (i=0; i<nlrows_is; i++) {
58     Ncols_recv             += ncols_recv[i];
59     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
60   }
61   Ncols_send = 0;
62   for (i=0; i<nlrows_mat; i++) {
63     Ncols_send += ncols_send[i];
64   }
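  /* Second SF round: one leaf per received column entry, pointing at the owner's slot in adjncy[] (and values[] when edge weights are used) */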
65   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
66   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
67   nleaves = Ncols_recv;
68   Ncols_recv = 0;
69   for (i=0; i<nlrows_is; i++) {
70     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
71     for (j=0; j<ncols_recv[i]; j++) {
72       iremote[Ncols_recv].rank    = owner;
73       iremote[Ncols_recv++].index = xadj_recv[i]+j;
74     }
75   }
76   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* also gather the edge weights if they are in use */
78   if (a->useedgeweights) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
79   nroots = Ncols_send;
80   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
81   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
82   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
83   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
84   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);CHKERRQ(ierr);
85   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);CHKERRQ(ierr);
86   if (a->useedgeweights) {
87     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);CHKERRQ(ierr);
88     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);CHKERRQ(ierr);
89   }
90   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
91   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
92   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
93   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
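  /* Keep only the received column entries that also appear in icols; entries marked -1 are dropped when compacting below */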
94   rnclos = 0;
95   for (i=0; i<nlrows_is; i++) {
96     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
97       ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
98       if (loc<0) {
99         adjncy_recv[j] = -1;
100         if (a->useedgeweights) values_recv[j] = -1;
101         ncols_recv[i]--;
102       } else {
103         rnclos++;
104       }
105     }
106   }
107   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
108   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
109   if (a->useedgeweights) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
110   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
111   rnclos = 0;
112   for (i=0; i<nlrows_is; i++) {
113     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
114       if (adjncy_recv[j]<0) continue;
115       sadjncy[rnclos] = adjncy_recv[j];
116       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
117       rnclos++;
118     }
119   }
120   for (i=0; i<nlrows_is; i++) {
121     sxadj[i+1] = sxadj[i]+ncols_recv[i];
122   }
123   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
124   if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
125   if (sadj_values) {
126     if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
127   } else {
128     if (a->useedgeweights) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
129   }
130   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
131   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
132   if (a->useedgeweights) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
133   PetscFunctionReturn(0);
134 }
135 
136 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
137 {
138   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
139   PetscInt          *indices,nindx,j,k,loc;
140   PetscMPIInt        issame;
141   const PetscInt    *irow_indices,*icol_indices;
142   MPI_Comm           scomm_row,scomm_col,scomm_mat;
143   PetscErrorCode     ierr;
144 
145   PetscFunctionBegin;
146   nindx = 0;
  /*
   * Estimate the maximum number of row plus column indices needed so the work array can be allocated once
   */
150   for (i=0; i<n; i++) {
151     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
152     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
153     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
154   }
155   ierr = PetscMalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct each submatrix in turn */
157   for (i=0; i<n; i++) {
158     if (subcomm) {
159       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
160       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
161       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRMPI(ierr);
      PetscCheckFalse(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same communicator as the column index set");
163       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRMPI(ierr);
      PetscCheckFalse(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as the communicator when extracting a parallel submatrix");
165     } else {
166       scomm_row = PETSC_COMM_SELF;
167     }
168     /*get sub-matrix data*/
169     sxadj=NULL; sadjncy=NULL; svalues=NULL;
170     ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
171     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
172     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
173     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
174     ierr = PetscArraycpy(indices,irow_indices,irow_n);CHKERRQ(ierr);
175     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
176     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
177     ierr = PetscArraycpy(indices+irow_n,icol_indices,icol_n);CHKERRQ(ierr);
178     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
179     nindx = irow_n+icol_n;
180     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
181     /* renumber columns */
182     for (j=0; j<irow_n; j++) {
183       for (k=sxadj[j]; k<sxadj[j+1]; k++) {
184         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
        PetscCheckFalse(loc<0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %" PetscInt_FMT,sadjncy[k]);
186         sadjncy[k] = loc;
187       }
188     }
189     if (scall==MAT_INITIAL_MATRIX) {
190       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
191     } else {
192        Mat                sadj = *(submat[i]);
193        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
194        ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
195        ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRMPI(ierr);
       PetscCheckFalse(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same communicator as the index sets");
197        ierr = PetscArraycpy(sa->i,sxadj,irow_n+1);CHKERRQ(ierr);
198        ierr = PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);CHKERRQ(ierr);
199        if (svalues) {ierr = PetscArraycpy(sa->values,svalues,sxadj[irow_n]);CHKERRQ(ierr);}
200        ierr = PetscFree(sxadj);CHKERRQ(ierr);
201        ierr = PetscFree(sadjncy);CHKERRQ(ierr);
202        if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
203     }
204   }
205   ierr = PetscFree(indices);CHKERRQ(ierr);
206   PetscFunctionReturn(0);
207 }
208 
209 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
210 {
211   PetscErrorCode     ierr;
212   /*get sub-matrices across a sub communicator */
213   PetscFunctionBegin;
214   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
215   PetscFunctionReturn(0);
216 }
217 
218 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
219 {
220   PetscErrorCode     ierr;
221 
222   PetscFunctionBegin;
223   /*get sub-matrices based on PETSC_COMM_SELF */
224   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
225   PetscFunctionReturn(0);
226 }
227 
228 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
229 {
230   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
231   PetscErrorCode    ierr;
232   PetscInt          i,j,m = A->rmap->n;
233   const char        *name;
234   PetscViewerFormat format;
235 
236   PetscFunctionBegin;
237   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
238   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
239   if (format == PETSC_VIEWER_ASCII_INFO) {
240     PetscFunctionReturn(0);
  } else {
    PetscCheckFalse(format == PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
243     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
244     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
245     for (i=0; i<m; i++) {
246       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart);CHKERRQ(ierr);
247       for (j=a->i[i]; j<a->i[i+1]; j++) {
248         if (a->values) {
249           ierr = PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]);CHKERRQ(ierr);
250         } else {
251           ierr = PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]);CHKERRQ(ierr);
252         }
253       }
254       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
255     }
256     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
257     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
258     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
259   }
260   PetscFunctionReturn(0);
261 }
262 
263 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
264 {
265   PetscErrorCode ierr;
266   PetscBool      iascii;
267 
268   PetscFunctionBegin;
269   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
270   if (iascii) {
271     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
272   }
273   PetscFunctionReturn(0);
274 }
275 
276 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
277 {
278   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
279   PetscErrorCode ierr;
280 
281   PetscFunctionBegin;
282 #if defined(PETSC_USE_LOG)
283   PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
284 #endif
285   ierr = PetscFree(a->diag);CHKERRQ(ierr);
286   if (a->freeaij) {
287     if (a->freeaijwithfree) {
288       if (a->i) free(a->i);
289       if (a->j) free(a->j);
290     } else {
291       ierr = PetscFree(a->i);CHKERRQ(ierr);
292       ierr = PetscFree(a->j);CHKERRQ(ierr);
293       ierr = PetscFree(a->values);CHKERRQ(ierr);
294     }
295   }
296   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
297   ierr = PetscFree(mat->data);CHKERRQ(ierr);
298   ierr = PetscObjectChangeTypeName((PetscObject)mat,NULL);CHKERRQ(ierr);
299   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
300   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
305 {
306   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   switch (op) {
311   case MAT_SYMMETRIC:
312   case MAT_STRUCTURALLY_SYMMETRIC:
313   case MAT_HERMITIAN:
314     a->symmetric = flg;
315     break;
316   case MAT_SYMMETRY_ETERNAL:
317     break;
318   default:
319     ierr = PetscInfo(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
320     break;
321   }
322   PetscFunctionReturn(0);
323 }
324 
325 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
326 {
327   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
328   PetscErrorCode ierr;
329 
330   PetscFunctionBegin;
331   row -= A->rmap->rstart;
332   PetscCheckFalse(row < 0 || row >= A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
333   *nz = a->i[row+1] - a->i[row];
334   if (v) {
335     PetscInt j;
336     if (a->rowvalues_alloc < *nz) {
337       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
338       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
339       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
340     }
341     for (j=0; j<*nz; j++) {
342       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
343     }
344     *v = (*nz) ? a->rowvalues : NULL;
345   }
346   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
347   PetscFunctionReturn(0);
348 }
349 
350 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
351 {
352   PetscFunctionBegin;
353   PetscFunctionReturn(0);
354 }
355 
356 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
357 {
358   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
359   PetscErrorCode ierr;
360   PetscBool      flag;
361 
362   PetscFunctionBegin;
  /* If the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise the row offsets i[] and the column indices j[] must match entry by entry */
    ierr = PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscArraycmp(a->j,b->j,a->nz,&flag);CHKERRQ(ierr);
    }
  }
373 
374   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
375   PetscFunctionReturn(0);
376 }
377 
378 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
379 {
380   PetscInt   i;
381   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
382   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
383 
384   PetscFunctionBegin;
385   *m    = A->rmap->n;
386   *ia   = a->i;
387   *ja   = a->j;
388   *done = PETSC_TRUE;
389   if (oshift) {
390     for (i=0; i<(*ia)[*m]; i++) {
391       (*ja)[i]++;
392     }
393     for (i=0; i<=(*m); i++) (*ia)[i]++;
394   }
395   PetscFunctionReturn(0);
396 }
397 
398 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
399 {
400   PetscInt   i;
401   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
402   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
403 
404   PetscFunctionBegin;
405   PetscCheckFalse(ia && a->i != *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
406   PetscCheckFalse(ja && a->j != *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
407   if (oshift) {
    PetscCheckFalse(!ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inia[] argument");
    PetscCheckFalse(!ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is used you must pass in the inja[] argument");
410     for (i=0; i<=(*m); i++) (*ia)[i]--;
411     for (i=0; i<(*ia)[*m]; i++) {
412       (*ja)[i]--;
413     }
414   }
415   PetscFunctionReturn(0);
416 }
417 
418 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
419 {
420   Mat               B;
421   PetscErrorCode    ierr;
422   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
423   const PetscInt    *rj;
424   const PetscScalar *ra;
425   MPI_Comm          comm;
426 
427   PetscFunctionBegin;
428   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
429   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
430   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
431 
432   /* count the number of nonzeros per row */
433   for (i=0; i<m; i++) {
434     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
435     for (j=0; j<len; j++) {
436       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
437     }
438     nzeros += len;
439     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
440   }
441 
442   /* malloc space for nonzeros */
443   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
444   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
445   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
446 
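  /* second pass: copy the off-diagonal column indices and store |value| as the integer edge weight */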
447   nzeros = 0;
448   ia[0]  = 0;
449   for (i=0; i<m; i++) {
450     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
451     cnt  = 0;
452     for (j=0; j<len; j++) {
453       if (rj[j] != i+rstart) { /* if not diagonal */
454         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
455         ja[nzeros+cnt++] = rj[j];
456       }
457     }
458     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
459     nzeros += cnt;
460     ia[i+1] = nzeros;
461   }
462 
463   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
464   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
465   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
466   ierr = MatSetType(B,type);CHKERRQ(ierr);
467   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
468 
469   if (reuse == MAT_INPLACE_MATRIX) {
470     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
471   } else {
472     *newmat = B;
473   }
474   PetscFunctionReturn(0);
475 }
476 
477 /* -------------------------------------------------------------------*/
478 static struct _MatOps MatOps_Values = {NULL,
479                                        MatGetRow_MPIAdj,
480                                        MatRestoreRow_MPIAdj,
481                                        NULL,
482                                 /* 4*/ NULL,
483                                        NULL,
484                                        NULL,
485                                        NULL,
486                                        NULL,
487                                        NULL,
488                                 /*10*/ NULL,
489                                        NULL,
490                                        NULL,
491                                        NULL,
492                                        NULL,
493                                 /*15*/ NULL,
494                                        MatEqual_MPIAdj,
495                                        NULL,
496                                        NULL,
497                                        NULL,
498                                 /*20*/ NULL,
499                                        NULL,
500                                        MatSetOption_MPIAdj,
501                                        NULL,
502                                 /*24*/ NULL,
503                                        NULL,
504                                        NULL,
505                                        NULL,
506                                        NULL,
507                                 /*29*/ NULL,
508                                        NULL,
509                                        NULL,
510                                        NULL,
511                                        NULL,
512                                 /*34*/ NULL,
513                                        NULL,
514                                        NULL,
515                                        NULL,
516                                        NULL,
517                                 /*39*/ NULL,
518                                        MatCreateSubMatrices_MPIAdj,
519                                        NULL,
520                                        NULL,
521                                        NULL,
522                                 /*44*/ NULL,
523                                        NULL,
524                                        MatShift_Basic,
525                                        NULL,
526                                        NULL,
527                                 /*49*/ NULL,
528                                        MatGetRowIJ_MPIAdj,
529                                        MatRestoreRowIJ_MPIAdj,
530                                        NULL,
531                                        NULL,
532                                 /*54*/ NULL,
533                                        NULL,
534                                        NULL,
535                                        NULL,
536                                        NULL,
537                                 /*59*/ NULL,
538                                        MatDestroy_MPIAdj,
539                                        MatView_MPIAdj,
540                                        MatConvertFrom_MPIAdj,
541                                        NULL,
542                                 /*64*/ NULL,
543                                        NULL,
544                                        NULL,
545                                        NULL,
546                                        NULL,
547                                 /*69*/ NULL,
548                                        NULL,
549                                        NULL,
550                                        NULL,
551                                        NULL,
552                                 /*74*/ NULL,
553                                        NULL,
554                                        NULL,
555                                        NULL,
556                                        NULL,
557                                 /*79*/ NULL,
558                                        NULL,
559                                        NULL,
560                                        NULL,
561                                        NULL,
562                                 /*84*/ NULL,
563                                        NULL,
564                                        NULL,
565                                        NULL,
566                                        NULL,
567                                 /*89*/ NULL,
568                                        NULL,
569                                        NULL,
570                                        NULL,
571                                        NULL,
572                                 /*94*/ NULL,
573                                        NULL,
574                                        NULL,
575                                        NULL,
576                                        NULL,
577                                 /*99*/ NULL,
578                                        NULL,
579                                        NULL,
580                                        NULL,
581                                        NULL,
582                                /*104*/ NULL,
583                                        NULL,
584                                        NULL,
585                                        NULL,
586                                        NULL,
587                                /*109*/ NULL,
588                                        NULL,
589                                        NULL,
590                                        NULL,
591                                        NULL,
592                                /*114*/ NULL,
593                                        NULL,
594                                        NULL,
595                                        NULL,
596                                        NULL,
597                                /*119*/ NULL,
598                                        NULL,
599                                        NULL,
600                                        NULL,
601                                        NULL,
602                                /*124*/ NULL,
603                                        NULL,
604                                        NULL,
605                                        NULL,
606                                        MatCreateSubMatricesMPI_MPIAdj,
607                                /*129*/ NULL,
608                                        NULL,
609                                        NULL,
610                                        NULL,
611                                        NULL,
612                                /*134*/ NULL,
613                                        NULL,
614                                        NULL,
615                                        NULL,
616                                        NULL,
617                                /*139*/ NULL,
618                                        NULL,
619                                        NULL,
620                                        NULL,
621                                        NULL,
622                                 /*144*/NULL,
623                                        NULL,
624                                        NULL,
625                                        NULL
626 };
627 
628 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
629 {
630   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
631   PetscBool       useedgeweights;
632   PetscErrorCode ierr;
633 
634   PetscFunctionBegin;
635   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
636   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
637   if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make sure every process agrees on whether edge weights are being used */
639   ierr = MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));CHKERRMPI(ierr);
640 
641   if (PetscDefined(USE_DEBUG)) {
642     PetscInt ii;
643 
644     PetscCheckFalse(i[0] != 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<=B->rmap->n; ii++) {
646       PetscCheckFalse(i[ii] < 0 || i[ii] < i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
647     }
648     for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheckFalse(j[ii] < 0 || j[ii] >= B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%" PetscInt_FMT "]=%" PetscInt_FMT " is out of range [0,%" PetscInt_FMT ")",ii,j[ii],B->cmap->N);
650     }
651   }
652   B->preallocated = PETSC_TRUE;
653 
654   b->j      = j;
655   b->i      = i;
656   b->values = values;
657 
658   b->nz        = i[B->rmap->n];
659   b->diag      = NULL;
660   b->symmetric = PETSC_FALSE;
661   b->freeaij   = PETSC_TRUE;
662 
663   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
664   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
665   PetscFunctionReturn(0);
666 }
667 
668 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
669 {
670   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
671   PetscErrorCode ierr;
672   const PetscInt *ranges;
673   MPI_Comm       acomm,bcomm;
674   MPI_Group      agroup,bgroup;
675   PetscMPIInt    i,rank,size,nranks,*ranks;
676 
677   PetscFunctionBegin;
678   *B    = NULL;
679   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
680   ierr  = MPI_Comm_size(acomm,&size);CHKERRMPI(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRMPI(ierr);
682   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
683   for (i=0,nranks=0; i<size; i++) {
684     if (ranges[i+1] - ranges[i] > 0) nranks++;
685   }
686   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
687     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
688     *B   = A;
689     PetscFunctionReturn(0);
690   }
691 
692   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
693   for (i=0,nranks=0; i<size; i++) {
694     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
695   }
696   ierr = MPI_Comm_group(acomm,&agroup);CHKERRMPI(ierr);
697   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRMPI(ierr);
698   ierr = PetscFree(ranks);CHKERRQ(ierr);
699   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRMPI(ierr);
700   ierr = MPI_Group_free(&agroup);CHKERRMPI(ierr);
701   ierr = MPI_Group_free(&bgroup);CHKERRMPI(ierr);
702   if (bcomm != MPI_COMM_NULL) {
703     PetscInt   m,N;
704     Mat_MPIAdj *b;
705     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
706     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
707     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
708     b          = (Mat_MPIAdj*)(*B)->data;
709     b->freeaij = PETSC_FALSE;
710     ierr       = MPI_Comm_free(&bcomm);CHKERRMPI(ierr);
711   }
712   PetscFunctionReturn(0);
713 }
714 
715 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
716 {
717   PetscErrorCode ierr;
718   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
719   PetscInt       *Values = NULL;
720   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
721   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
722 
723   PetscFunctionBegin;
724   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRMPI(ierr);
725   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
726   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
727   nz   = adj->nz;
  PetscCheckFalse(adj->i[m] != nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " does not match i[m] %" PetscInt_FMT,nz,adj->i[m]);
729   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
730 
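  /* gather every rank's local nonzero count so the displacements for the Allgatherv of j[] (and values[]) can be computed */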
731   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
732   ierr = PetscMalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
733   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
734   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
735   if (adj->values) {
736     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
737     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
738   }
739   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
740   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
741   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
742   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
743   nzstart -= nz;
744   /* shift the i[] values so they will be correct after being received */
745   for (i=0; i<m; i++) adj->i[i] += nzstart;
746   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
747   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
748   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
749   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
750   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
751   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
752   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
753   II[M] = NZ;
754   /* shift the i[] values back */
755   for (i=0; i<m; i++) adj->i[i] -= nzstart;
756   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
757   PetscFunctionReturn(0);
758 }
759 
760 /*@
   MatMPIAdjCreateNonemptySubcommMat - Creates the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
762 
763    Collective
764 
765    Input Parameter:
766 .  A - original MPIAdj matrix
767 
768    Output Parameter:
769 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
770 
771    Level: developer
772 
773    Note:
774    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
775 
776    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
777 
778 .seealso: MatCreateMPIAdj()
779 @*/
780 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
781 {
782   PetscErrorCode ierr;
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
786   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
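
/*
   Usage sketch (illustrative only; the names adj and adjsub are hypothetical): the returned matrix is
   NULL on ranks that own no rows of the original, so only the remaining ranks participate further.

     Mat adjsub;

     ierr = MatMPIAdjCreateNonemptySubcommMat(adj,&adjsub);CHKERRQ(ierr);
     if (adjsub) {
       ... work on the subcommunicator of adjsub, e.g. run a partitioner that needs nonempty ranks ...
       ierr = MatDestroy(&adjsub);CHKERRQ(ierr);
     }
*/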
789 
790 /*MC
791    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
792    intended for use constructing orderings and partitionings.
793 
794   Level: beginner
795 
.seealso: MatCreateMPIAdj()
797 M*/
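
/*
   Conversion sketch (assumes an existing assembled matrix A of another type, e.g. MATAIJ): the adjacency
   graph of A, without its diagonal, can be obtained through the generic MatConvert() interface, which
   dispatches to MatConvertFrom_MPIAdj() above.

     Mat adj;

     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ... use adj for ordering or partitioning ...
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/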
798 
799 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
800 {
801   Mat_MPIAdj     *b;
802   PetscErrorCode ierr;
803 
804   PetscFunctionBegin;
805   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
806   B->data      = (void*)b;
807   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
808   B->assembled = PETSC_FALSE;
809 
810   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
811   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
812   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
813   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 /*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix into a complete (sequential) MPIAdj matrix on each process (needed by sequential preconditioners)
819 
820    Logically Collective
821 
822    Input Parameter:
823 .  A - the matrix
824 
825    Output Parameter:
826 .  B - the same matrix on all processes
827 
828    Level: intermediate
829 
830 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
831 @*/
832 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
833 {
834   PetscErrorCode ierr;
835 
836   PetscFunctionBegin;
837   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
838   PetscFunctionReturn(0);
839 }
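
/*
   Usage sketch (names are illustrative): gather a distributed adjacency matrix onto every process,
   for example before calling a sequential graph algorithm, and destroy the gathered copy afterwards.

     Mat seqadj;

     ierr = MatMPIAdjToSeq(adj,&seqadj);CHKERRQ(ierr);
     ... seqadj now holds the complete graph, stored on PETSC_COMM_SELF ...
     ierr = MatDestroy(&seqadj);CHKERRQ(ierr);
*/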
840 
841 /*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the adjacency structure and optional edge weights
843 
844    Logically Collective
845 
846    Input Parameters:
847 +  A - the matrix
848 .  i - the indices into j for the start of each row
849 .  j - the column indices for each row (sorted for each row).
850        The indices in i and j start with zero (NOT with one).
851 -  values - [optional] edge weights
852 
853    Level: intermediate
854 
855 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
856 @*/
857 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
858 {
859   PetscErrorCode ierr;
860 
861   PetscFunctionBegin;
862   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
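
/*
   Layout sketch (hypothetical data): for two local rows where local row 0 is adjacent to global
   columns 2 and 5 and local row 1 is adjacent to global column 0, the arrays passed to
   MatMPIAdjSetPreallocation() would be

     i[]    = {0, 2, 3}     length m+1; i[k+1]-i[k] is the number of neighbors of local row k
     j[]    = {2, 5, 0}     length i[m]; column indices, sorted within each row
     values = NULL          or an array of length i[m] holding one integer weight per edge

   All arrays must be obtained with PetscMalloc() because the matrix takes ownership and frees them.
*/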
865 
866 /*@C
867    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
868    The matrix does not have numerical values associated with it, but is
869    intended for ordering (to reduce bandwidth etc) and partitioning.
870 
871    Collective
872 
873    Input Parameters:
874 +  comm - MPI communicator
875 .  m - number of local rows
876 .  N - number of global columns
877 .  i - the indices into j for the start of each row
878 .  j - the column indices for each row (sorted for each row).
879        The indices in i and j start with zero (NOT with one).
880 -  values -[optional] edge weights
881 
882    Output Parameter:
883 .  A - the matrix
884 
885    Level: intermediate
886 
887    Notes:
   This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.
894 
895    If you already have a matrix, you can create its adjacency matrix by a call
896    to MatConvert, specifying a type of MATMPIADJ.
897 
898    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
899 
900 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
901 @*/
902 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
903 {
904   PetscErrorCode ierr;
905 
906   PetscFunctionBegin;
907   ierr = MatCreate(comm,A);CHKERRQ(ierr);
908   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
909   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
910   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
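
/*
   Usage sketch (illustrative only; ii, jj, and adj are hypothetical names): a rank owning two local
   rows of a graph with 8 global vertices (columns) builds its portion of the adjacency structure.
   The arrays are allocated with PetscMalloc1() because the matrix takes ownership of them and frees
   them when it is destroyed.

     Mat      adj;
     PetscInt *ii,*jj;

     ierr  = PetscMalloc1(3,&ii);CHKERRQ(ierr);
     ierr  = PetscMalloc1(4,&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 2; ii[2] = 4;
     jj[0] = 1; jj[1] = 3;              neighbors of local row 0 (global column indices, sorted)
     jj[2] = 0; jj[3] = 5;              neighbors of local row 1
     ierr  = MatCreateMPIAdj(PETSC_COMM_WORLD,2,8,ii,jj,NULL,&adj);CHKERRQ(ierr);
     ... hand adj to MatPartitioningSetAdjacency() or MatGetOrdering() ...
     ierr  = MatDestroy(&adj);CHKERRQ(ierr);
*/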
913