
/*
    Defines the basic matrix operations for the MPIAdj adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

/*
 * This helper gathers the rows of an MPIAdj matrix selected by irows, restricted to the
 * columns in icols, and is shared by MatCreateSubMatrix (parallel submatrix) and
 * MatCreateSubMatrices (sequential submatrices).
 */
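/*
 * Outline: build a PetscSF whose leaves are the requested rows and whose roots are the
 * locally owned rows, broadcast each row's length and offset from its owner, then build a
 * second SF over the individual nonzeros to fetch the column indices (and edge weights,
 * when present). Finally, columns not present in icols are filtered out and the surviving
 * entries are compacted into freshly allocated CSR arrays.
 */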
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv = NULL;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues = NULL;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct the SF graph: one leaf per requested row, rooted at the process that owns the row */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++){
    owner = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++){
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
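  /* Round 1: broadcast, from each requested row's owner, the row length (ncols) and the
     owner-local row offset (xadj); together these locate the row's nonzeros on the owner */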
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++){
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++){
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++){
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++){
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
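  /* Round 2: build a second SF with one leaf per incoming nonzero and fetch the column
     indices (and the edge weights, when they are in use) from the owning processes */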
  /* allocate the receive buffer for edge weights only when they are in use */
  if (a->useedgeweights) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (a->useedgeweights){
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
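  /* Filter out columns that are not in icols: mark dropped entries with -1 and
     count the entries that survive in each row */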
  rnclos = 0;
  for (i=0; i<nlrows_is; i++){
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc<0){
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (a->useedgeweights) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
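  /* Compact the surviving entries into the output CSR arrays sxadj/sadjncy (and svalues) */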
  rnclos = 0;
  for (i=0; i<nlrows_is; i++){
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++){
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj   = sxadj;}   else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values){
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (a->useedgeweights) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

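/*
 * Shared driver: with subcomm == PETSC_FALSE it produces sequential submatrices on
 * PETSC_COMM_SELF (MatCreateSubMatrices); with subcomm == PETSC_TRUE it produces parallel
 * submatrices on the communicator of the index sets (MatCreateSubMatricesMPI).
 */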
static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate an upper bound on the workspace needed for the merged row/column indices
   */
  for (i=0; i<n; i++){
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = PetscMax(nindx,irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct each submatrix */
  for (i=0; i<n; i++){
    if (subcomm){
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get the submatrix data */
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns: each column index becomes its position in the sorted union of the row and column index sets */
    for (j=0; j<irow_n; j++){
      for (k=sxadj[j]; k<sxadj[j+1]; k++){
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX){
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set");
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues){ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* get the submatrices across a subcommunicator */
  ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* get the submatrices based on PETSC_COMM_SELF */
  ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++){
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or the numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* the local parts are equal only if both the a->i and the a->j arrays match */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inia[] argument");
    if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

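/*
 * Builds an MPIAdj matrix from a general matrix A in two passes over the locally owned rows:
 * the first pass counts the off-diagonal nonzeros, the second fills the adjacency arrays,
 * storing the absolute value of each entry (truncated to PetscInt) as an edge weight.
 */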
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for the nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatCreateSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool      useedgeweights;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* Make sure every process knows whether edge weights are being used */
  ierr = MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

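/*
 * Implementation note: collect the ranks that own at least one row, form a subgroup and
 * subcommunicator from them, and wrap the existing i/j/values arrays (which are shared,
 * not copied, hence freeaij is set to PETSC_FALSE) in a new matrix on that subcommunicator.
 */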
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

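/*
 * Gathers the distributed adjacency structure onto every process: the per-rank nonzero and
 * row counts are exchanged with MPI_Allgather(), then j (and values, when present) and the
 * offset-shifted i arrays are concatenated with MPI_Allgatherv() so that each process can
 * assemble a complete copy of the graph on PETSC_COMM_SELF.
 */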
PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscErrorCode ierr;
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  nz   = adj->nz;
  if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct i[m] %D",nz,adj->i[m]);
  ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);

  ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
  ierr = PetscCalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
  ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1] + allnz[i-1];
  if (adj->values) {
    ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
    ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
  ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
  ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
  ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
  ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1] + allm[i-1];
  ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Notes:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

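   Example usage (a minimal sketch; error handling abbreviated):
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {   /* only ranks that own rows of A receive a matrix */
     /* ... hand B to a package that requires nonempty local row blocks ... */
     ierr = MatDestroy(&B);CHKERRQ(ierr);   /* destroy B before A, since the arrays are shared */
   }
.ve
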
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

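  Example usage (a minimal sketch; comm, m, N, and the PetscMalloc()'d arrays i and j are
  assumed to be set up as described in MatCreateMPIAdj()):
.vb
  Mat A;
  ierr = MatCreate(comm,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(A,i,j,NULL);CHKERRQ(ierr);
.ve
  This is the same sequence that MatCreateMPIAdj() performs internally.
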
.seealso: MatCreateMPIAdj()
M*/

PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete sequential MPIAdj matrix on each process (needed by sequential preconditioners)

   Logically Collective on MPI_Comm

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  B - the same matrix on all processes

   Level: intermediate

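   Example usage (a minimal sketch):
.vb
   Mat seqadj;
   ierr = MatMPIAdjToSeq(A,&seqadj);CHKERRQ(ierr);
   /* every process now holds the complete adjacency graph of A */
   ierr = MatDestroy(&seqadj);CHKERRQ(ierr);
.ve
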
.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

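   Notes:
   The i, j, and values arrays must be allocated with PetscMalloc() because the matrix takes
   ownership of them and frees them when the matrix is destroyed; see MatCreateMPIAdj() for details.
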
   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.
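
   For illustration, the adjacency graph of a path with four vertices (edges 0-1, 1-2, 2-3)
   held on a single process, with no diagonal entries, would be
.vb
   i = {0,1,3,5,6}       row k has i[k+1]-i[k] neighbors
   j = {1,0,2,1,3,2}     the neighbors of vertex 0 are {1}, of vertex 1 are {0,2}, and so on
.ve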

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
