
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * The interface is designed to serve both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices)
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct the SF graph */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++){
    owner = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++){
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
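  /*
   * First communication round: each requested row in irows is a leaf rooted at its owning
   * process, so broadcasting over this SF delivers the row length (ncols_recv) and the owner's
   * row offset (xadj_recv) for every requested row.
   */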
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++){
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++){
    Ncols_send += ncols_send[i];
  }
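  /*
   * Second communication round: each individual column entry of every requested row becomes a
   * leaf, rooted at the corresponding position in the owner's adjncy (and values) array.
   */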
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++){
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++){
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* determine whether we need to deal with edge weights */
  if (a->values){isvalue=1;} else {isvalue=0;}
  /* this would involve a global communication */
  /*ierr = MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
  if (isvalue){ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue){
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
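  /* keep only the columns that appear in icols; entries that are filtered out are marked with -1 */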
  rnclos = 0;
  for (i=0; i<nlrows_is; i++){
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
      ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
      if (loc<0){
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++){
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++){
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values){
    if (isvalue) *sadj_values = svalues; else *sadj_values=0;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj_Private"
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate an upper bound on the scratch space needed for the merged row/column indices
   */
  for (i=0; i<n; i++){
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct a submatrix for each pair of index sets */
  for (i=0; i<n; i++){
    if (subcomm){
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get the sub-matrix data */
    sxadj=0; sadjncy=0; svalues=0;
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns */
    for (j=0; j<irow_n; j++){
      for (k=sxadj[j]; k<sxadj[j+1]; k++){
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find col %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX){
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
       ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the index sets");
       ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
       ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
       if (svalues){ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
       ierr = PetscFree(sxadj);CHKERRQ(ierr);
       ierr = PetscFree(sadjncy);CHKERRQ(ierr);
       if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatricesMPI_MPIAdj"
static PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;
  /* get sub-matrices across a sub-communicator */
  PetscFunctionBegin;
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
static PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++){
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or the number of nonzeros are not equal, the matrices differ */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check whether the a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);

    /* check whether the a->j are the same */
    if (flag) {ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);}
  }

  ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inia[] argument");
    if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatGetSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
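
   Example usage (a minimal sketch; error handling via CHKERRQ as elsewhere in PETSc):
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {                        /* only ranks owning rows of A receive a matrix */
     /* ... use B, e.g. pass it to a partitioner ... */
     ierr = MatDestroy(&B);CHKERRQ(ierr);
   }
.ve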

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

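   A matrix of this type is normally created with MatCreateMPIAdj() or, as a sketch, by converting
   an existing matrix:
.vb
   Mat adj;
   ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
.ve
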
  Level: beginner

.seealso: MatCreateMPIAdj
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

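   Example (a sketch; the names ii and jj are illustrative, and B is assumed to have been created
   with MatCreate(), MatSetSizes(), and MatSetType(B,MATMPIADJ)): for two local rows where row 0
   has neighbors {1,3} and row 1 has neighbor {2}, one could set up
.vb
   PetscInt *ii,*jj;
   ierr = PetscMalloc1(3,&ii);CHKERRQ(ierr);     /* number of local rows + 1 */
   ierr = PetscMalloc1(3,&jj);CHKERRQ(ierr);     /* total number of entries  */
   ii[0] = 0; ii[1] = 2; ii[2] = 3;
   jj[0] = 1; jj[1] = 3; jj[2] = 2;
   ierr = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);   /* B takes ownership of ii and jj */
.ve
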
   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency list should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

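   Example (a minimal sketch on a single process; in parallel each rank supplies its own local
   rows): the adjacency list of a 4-vertex path 0-1-2-3, without diagonal entries, could be
   created as
.vb
   PetscInt *ii,*jj;
   Mat      A;
   ierr = PetscMalloc1(5,&ii);CHKERRQ(ierr);
   ierr = PetscMalloc1(6,&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;
   jj[0] = 1;                     /* row 0: neighbor 1    */
   jj[1] = 0; jj[2] = 2;          /* row 1: neighbors 0,2 */
   jj[3] = 1; jj[4] = 3;          /* row 2: neighbors 1,3 */
   jj[5] = 2;                     /* row 3: neighbor 2    */
   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,NULL,&A);CHKERRQ(ierr);
.ve
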
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}