
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

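/*
  Storage note (illustrative): the local rows of the adjacency graph are kept
  in standard compressed-row (CSR) form. For each local row r, the entries
  a->i[r] through a->i[r+1]-1 index into a->j, which holds the global column
  numbers of the neighbors of r. For example, a process owning the two rows

      row 0: 1 3
      row 1: 0 2

  has a->i = {0,2,4} and a->j = {1,3,0,2}.
*/
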
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++) a->rowvalues[j] = a->values[a->i[row]+j];
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or the numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row offsets; only if those match compare the column indices */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
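
/*
   Usage sketch (illustrative; error checking omitted): oshift selects the
   index base of the arrays returned by MatGetRowIJ(), so callers expecting
   1-based compressed-row input, such as Fortran code, can use

     PetscInt       n;
     const PetscInt *ia,*ja;
     PetscBool      done;
     MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
     ...   use ia[] and ja[], which are 1-based between these two calls
     MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
*/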

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros; ia holds the m+1 row offsets for the local rows */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
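
/*
   Typical use (an illustrative sketch): an assembled matrix of another type
   can be converted to this format through MatConvert(), which can dispatch to
   MatConvertFrom_MPIAdj() above. Use adj, e.g. with a MatPartitioning object,
   and then destroy it:

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/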

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_Single"
static PetscErrorCode MatGetSubMatrix_MPIAdj_Single(Mat adj,IS rows,IS cols,Mat *sadj)
{
  PetscInt           rows_localsize,cols_localsize,i,j,nroots,nleaves,owner,localindex,*ncols_send,*ncols_recv;
  PetscInt           nlocalrows,*adjncy_recv,*cols_send,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  const PetscInt    *rows_indices,*cols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscMPIInt        rank;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(rows,&rows_localsize);CHKERRQ(ierr);
  ierr = ISGetIndices(rows,&rows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rows_localsize,&iremote);CHKERRQ(ierr);
  /* each leaf is one requested row; locate its owner and local index there */
  nleaves = rows_localsize;
  for (i=0; i<rows_localsize; i++) {
    ierr = PetscLayoutFindOwnerIndex(rmap,rows_indices[i],&owner,&localindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = localindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlocalrows,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc3(nlocalrows,&ncols_send,rows_localsize,&xadj_recv,rows_localsize,&ncols_recv);CHKERRQ(ierr);
  nroots = nlocalrows;
  for (i=0; i<nlocalrows; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  /* first SF: fetch, for each requested row, its length and its offset in the owner's adjncy */
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<rows_localsize; i++) {
    Ncols_recv += ncols_recv[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlocalrows; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc2(Ncols_recv,&adjncy_recv,Ncols_recv,&values_recv);CHKERRQ(ierr);
  /* second SF: one leaf per requested column entry, pointing into the owner's adjncy;
     Ncols_recv is reused as a running counter and ends up restored to the total */
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<rows_localsize; i++) {
    ierr = PetscLayoutFindOwner(rmap,rows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlocalrows,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rows,&rows_indices);CHKERRQ(ierr);
  /* release work space */
  ierr = PetscFree3(ncols_send,xadj_recv,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree2(adjncy_recv,values_recv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

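   Example (an illustrative sketch; part is an assumed pre-existing MatPartitioning object and error checking is abbreviated). Ranks that own no rows of A get B == NULL, and B is destroyed before A:
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {
     ierr = MatPartitioningSetAdjacency(part,B);CHKERRQ(ierr);
   }
   ierr = MatDestroy(&B);CHKERRQ(ierr);
.ve
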
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

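   Example (an illustrative sketch for a process owning two rows of a
   four-vertex graph; ii holds the 3 row offsets, jj the 4 neighbor indices,
   and both must come from PetscMalloc() since PETSc takes ownership):
.vb
   PetscInt *ii,*jj;
   ierr = PetscMalloc1(3,&ii);CHKERRQ(ierr);
   ierr = PetscMalloc1(4,&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 2; ii[2] = 4;
   jj[0] = 1; jj[1] = 3;
   jj[2] = 0; jj[3] = 2;
   ierr = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
.ve
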
   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

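   Example (an illustrative sketch of one process's input for a four-vertex
   graph distributed over two processes with two local rows each; the local
   rows here have neighbor lists {1,3} and {0,2}, and error checking is
   omitted):
.vb
   PetscInt *ii,*jj;
   Mat      A;
   PetscMalloc1(3,&ii);
   PetscMalloc1(4,&jj);
   ii[0] = 0; ii[1] = 2; ii[2] = 4;
   jj[0] = 1; jj[1] = 3;
   jj[2] = 0; jj[3] = 2;
   MatCreateMPIAdj(comm,2,4,ii,jj,NULL,&A);
.ve
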
735 @*/
736 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
737 {
738   PetscErrorCode ierr;
739 
740   PetscFunctionBegin;
741   ierr = MatCreate(comm,A);CHKERRQ(ierr);
742   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
743   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
744   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
745   PetscFunctionReturn(0);
746 }