xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision a44bcdf57b2bdc3efd71d40abec79a20d8d3f002)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "MatView_MPIAdj_ASCII"
10 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
11 {
12   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
13   PetscErrorCode    ierr;
14   PetscInt          i,j,m = A->rmap->n;
15   const char        *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
27     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
28     for (i=0; i<m; i++) {
29       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
30       for (j=a->i[i]; j<a->i[i+1]; j++) {
31         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
34     }
35     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
36     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
38   }
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNCT__
43 #define __FUNCT__ "MatView_MPIAdj"
44 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
45 {
46   PetscErrorCode ierr;
47   PetscBool      iascii;
48 
49   PetscFunctionBegin;
50   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
51   if (iascii) {
52     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNCT__
58 #define __FUNCT__ "MatDestroy_MPIAdj"
59 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
60 {
61   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65 #if defined(PETSC_USE_LOG)
66   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
67 #endif
68   ierr = PetscFree(a->diag);CHKERRQ(ierr);
69   if (a->freeaij) {
70     if (a->freeaijwithfree) {
71       if (a->i) free(a->i);
72       if (a->j) free(a->j);
73     } else {
74       ierr = PetscFree(a->i);CHKERRQ(ierr);
75       ierr = PetscFree(a->j);CHKERRQ(ierr);
76       ierr = PetscFree(a->values);CHKERRQ(ierr);
77     }
78   }
79   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
80   ierr = PetscFree(mat->data);CHKERRQ(ierr);
81   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
83   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatSetOption_MPIAdj"
89 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
90 {
91   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
92   PetscErrorCode ierr;
93 
94   PetscFunctionBegin;
95   switch (op) {
96   case MAT_SYMMETRIC:
97   case MAT_STRUCTURALLY_SYMMETRIC:
98   case MAT_HERMITIAN:
99     a->symmetric = flg;
100     break;
101   case MAT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap->n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap->n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   row -= A->rmap->rstart;
146   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147   *nz = a->i[row+1] - a->i[row];
148   if (v) {
149     PetscInt j;
150     if (a->rowvalues_alloc < *nz) {
151       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
152       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
153       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
154     }
155     for (j=0; j<*nz; j++){
156       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
157     }
158     *v = (*nz) ? a->rowvalues : NULL;
159   }
160   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
161   PetscFunctionReturn(0);
162 }
163 
164 #undef __FUNCT__
165 #define __FUNCT__ "MatRestoreRow_MPIAdj"
166 PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
167 {
168   PetscFunctionBegin;
169   PetscFunctionReturn(0);
170 }
171 
172 #undef __FUNCT__
173 #define __FUNCT__ "MatEqual_MPIAdj"
174 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
175 {
176   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
177   PetscErrorCode ierr;
178   PetscBool      flag;
179 
180   PetscFunctionBegin;
181   /* if the matrix dimensions or numbers of nonzeros differ, the matrices cannot be equal */
182   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
183     flag = PETSC_FALSE;
184   } else {
185     /* compare the row offsets a->i */
186     ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
187     /* compare the column indices a->j only if the row offsets already matched */
188     if (flag) {
189       ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
190     }
191   }
192   ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
193   PetscFunctionReturn(0);
194 }
195 
196 #undef __FUNCT__
197 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
198 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
199 {
200   PetscInt   i;
201   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
202   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
203 
204   PetscFunctionBegin;
205   *m    = A->rmap->n;
206   *ia   = a->i;
207   *ja   = a->j;
208   *done = PETSC_TRUE;
209   if (oshift) {
210     for (i=0; i<(*ia)[*m]; i++) {
211       (*ja)[i]++;
212     }
213     for (i=0; i<=(*m); i++) (*ia)[i]++;
214   }
215   PetscFunctionReturn(0);
216 }
217 
218 #undef __FUNCT__
219 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
220 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
221 {
222   PetscInt   i;
223   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
224   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
225 
226   PetscFunctionBegin;
227   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
228   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
229   if (oshift) {
230     for (i=0; i<=(*m); i++) (*ia)[i]--;
231     for (i=0; i<(*ia)[*m]; i++) {
232       (*ja)[i]--;
233     }
234   }
235   PetscFunctionReturn(0);
236 }
237 
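/*
   MatConvertFrom_MPIAdj - added description: builds an MPIAdj adjacency matrix from an
   assembled matrix of another type (reached through MatConvert()).  It counts the
   off-diagonal entries of each local row, copies their column indices, and uses
   PetscAbsScalar() of each entry, cast to PetscInt, as the edge weight.
*/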
238 #undef __FUNCT__
239 #define __FUNCT__ "MatConvertFrom_MPIAdj"
240 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
241 {
242   Mat               B;
243   PetscErrorCode    ierr;
244   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
245   const PetscInt    *rj;
246   const PetscScalar *ra;
247   MPI_Comm          comm;
248 
249   PetscFunctionBegin;
250   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
251   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
252   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
253 
254   /* count the number of nonzeros per row */
255   for (i=0; i<m; i++) {
256     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
257     for (j=0; j<len; j++) {
258       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
259     }
260     nzeros += len;
261     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
262   }
263 
264   /* malloc space for nonzeros */
265   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
266   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
267   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
268 
269   nzeros = 0;
270   ia[0]  = 0;
271   for (i=0; i<m; i++) {
272     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
273     cnt  = 0;
274     for (j=0; j<len; j++) {
275       if (rj[j] != i+rstart) { /* if not diagonal */
276         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
277         ja[nzeros+cnt++] = rj[j];
278       }
279     }
280     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
281     nzeros += cnt;
282     ia[i+1] = nzeros;
283   }
284 
285   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
286   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
287   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
288   ierr = MatSetType(B,type);CHKERRQ(ierr);
289   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
290 
291   if (reuse == MAT_REUSE_MATRIX) {
292     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
293   } else {
294     *newmat = B;
295   }
296   PetscFunctionReturn(0);
297 }
298 
299 /* -------------------------------------------------------------------*/
300 static struct _MatOps MatOps_Values = {0,
301                                        MatGetRow_MPIAdj,
302                                        MatRestoreRow_MPIAdj,
303                                        0,
304                                 /* 4*/ 0,
305                                        0,
306                                        0,
307                                        0,
308                                        0,
309                                        0,
310                                 /*10*/ 0,
311                                        0,
312                                        0,
313                                        0,
314                                        0,
315                                 /*15*/ 0,
316                                        MatEqual_MPIAdj,
317                                        0,
318                                        0,
319                                        0,
320                                 /*20*/ 0,
321                                        0,
322                                        MatSetOption_MPIAdj,
323                                        0,
324                                 /*24*/ 0,
325                                        0,
326                                        0,
327                                        0,
328                                        0,
329                                 /*29*/ 0,
330                                        0,
331                                        0,
332                                        0,
333                                        0,
334                                 /*34*/ 0,
335                                        0,
336                                        0,
337                                        0,
338                                        0,
339                                 /*39*/ 0,
340                                        MatGetSubMatrices_MPIAdj,
341                                        0,
342                                        0,
343                                        0,
344                                 /*44*/ 0,
345                                        0,
346                                        MatShift_Basic,
347                                        0,
348                                        0,
349                                 /*49*/ 0,
350                                        MatGetRowIJ_MPIAdj,
351                                        MatRestoreRowIJ_MPIAdj,
352                                        0,
353                                        0,
354                                 /*54*/ 0,
355                                        0,
356                                        0,
357                                        0,
358                                        0,
359                                 /*59*/ 0,
360                                        MatDestroy_MPIAdj,
361                                        MatView_MPIAdj,
362                                        MatConvertFrom_MPIAdj,
363                                        0,
364                                 /*64*/ 0,
365                                        0,
366                                        0,
367                                        0,
368                                        0,
369                                 /*69*/ 0,
370                                        0,
371                                        0,
372                                        0,
373                                        0,
374                                 /*74*/ 0,
375                                        0,
376                                        0,
377                                        0,
378                                        0,
379                                 /*79*/ 0,
380                                        0,
381                                        0,
382                                        0,
383                                        0,
384                                 /*84*/ 0,
385                                        0,
386                                        0,
387                                        0,
388                                        0,
389                                 /*89*/ 0,
390                                        0,
391                                        0,
392                                        0,
393                                        0,
394                                 /*94*/ 0,
395                                        0,
396                                        0,
397                                        0,
398                                        0,
399                                 /*99*/ 0,
400                                        0,
401                                        0,
402                                        0,
403                                        0,
404                                /*104*/ 0,
405                                        0,
406                                        0,
407                                        0,
408                                        0,
409                                /*109*/ 0,
410                                        0,
411                                        0,
412                                        0,
413                                        0,
414                                /*114*/ 0,
415                                        0,
416                                        0,
417                                        0,
418                                        0,
419                                /*119*/ 0,
420                                        0,
421                                        0,
422                                        0,
423                                        0,
424                                /*124*/ 0,
425                                        0,
426                                        0,
427                                        0,
428                                        MatGetSubMatricesMPI_MPIAdj,
429                                /*129*/ 0,
430                                        0,
431                                        0,
432                                        0,
433                                        0,
434                                /*134*/ 0,
435                                        0,
436                                        0,
437                                        0,
438                                        0,
439                                /*139*/ 0,
440                                        0,
441                                        0
442 };
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
446 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
447 {
448   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
449   PetscErrorCode ierr;
450 #if defined(PETSC_USE_DEBUG)
451   PetscInt ii;
452 #endif
453 
454   PetscFunctionBegin;
455   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
456   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
457 
458 #if defined(PETSC_USE_DEBUG)
459   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
460   for (ii=1; ii<B->rmap->n; ii++) {
461     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
462   }
463   for (ii=0; ii<i[B->rmap->n]; ii++) {
464     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
465   }
466 #endif
467   B->preallocated = PETSC_TRUE;
468 
469   b->j      = j;
470   b->i      = i;
471   b->values = values;
472 
473   b->nz        = i[B->rmap->n];
474   b->diag      = 0;
475   b->symmetric = PETSC_FALSE;
476   b->freeaij   = PETSC_TRUE;
477 
478   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
479   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 
483 static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat,IS,IS, PetscInt **,PetscInt **,PetscInt **);
484 static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat,PetscInt,const IS[],const IS[],PetscBool,MatReuse,Mat **);
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "MatGetSubMatricesMPI_MPIAdj"
488 PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
489 {
490   PetscErrorCode     ierr;
491   PetscFunctionBegin;
492   /* get sub-matrices across a sub-communicator */
493   ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
494   PetscFunctionReturn(0);
495 }
496 
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "MatGetSubMatrices_MPIAdj"
500 PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
501 {
502   PetscErrorCode     ierr;
503 
504   PetscFunctionBegin;
505   /* get sequential sub-matrices on PETSC_COMM_SELF */
506   ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "MatGetSubMatrices_MPIAdj_Private"
512 static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
513 {
514   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
515   PetscInt          *indices,nindx,j,k,loc;
516   PetscMPIInt        issame;
517   const PetscInt    *irow_indices,*icol_indices;
518   MPI_Comm           scomm_row,scomm_col,scomm_mat;
519   PetscErrorCode     ierr;
520 
521   PetscFunctionBegin;
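  /*
     Added description of the overall flow: for each pair (irow[i],icol[i]) the routine
     extracts the corresponding rows of the global adjacency structure with
     MatGetSubMatrix_MPIAdj_data(), renumbers the surviving column indices into the sorted,
     duplicate-free union of the row and column index sets, and then either creates a new
     MPIAdj matrix (MAT_INITIAL_MATRIX) or copies the data into the existing one
     (MAT_REUSE_MATRIX).
  */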
522   nindx = 0;
523   /*
524    * Estimate an upper bound on the workspace needed to merge any row/column index set pair
525    */
526   for (i=0; i<n; i++) {
527     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
528     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
529     nindx = PetscMax(nindx,irow_n+icol_n);
530   }
531   ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
532   /* construct each sub-matrix */
533   for (i=0; i<n; i++) {
534     /* determine the communicator on which this sub-matrix lives */
535     if (subcomm) {
536       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
537       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
538       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
539       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same communicator as the column index set");
540       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
541       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as the communicator when extracting a parallel sub-matrix");
542     } else {
543       scomm_row = PETSC_COMM_SELF;
544     }
545     /* get the sub-matrix CSR data */
546     sxadj = 0; sadjncy = 0; svalues = 0;
547     ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
548     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
549     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
550     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
551     ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
552     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
553     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
554     ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
555     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
556     nindx = irow_n+icol_n;
557     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
558     /* renumber columns into positions within the merged index set */
559     for (j=0; j<irow_n; j++) {
560       for (k=sxadj[j]; k<sxadj[j+1]; k++) {
561         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
562 #if defined(PETSC_USE_DEBUG)
563         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
564 #endif
565         sadjncy[k] = loc;
566       }
567     }
568     if (scall == MAT_INITIAL_MATRIX) {
569       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
570     } else {
571       Mat        sadj = *(submat[i]);
572       Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
573       ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
574       ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
575       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"sub-matrix must have the same communicator as the index sets");
576       ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
577       ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
578       if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
579       ierr = PetscFree(sxadj);CHKERRQ(ierr);
580       ierr = PetscFree(sadjncy);CHKERRQ(ierr);
581       if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
582     }
583   }
584   ierr = PetscFree(indices);CHKERRQ(ierr);
585   PetscFunctionReturn(0);
586 }
587 
588 
589 #undef __FUNCT__
590 #define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
591 /*
592  * The interface is designed to serve both MatGetSubMatrix (a parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices); it returns raw CSR arrays rather than a Mat.
593  */
594 static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
595 {
596   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
597   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
598   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
599   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
600   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
601   PetscLayout        rmap;
602   MPI_Comm           comm;
603   PetscSF            sf;
604   PetscSFNode       *iremote;
605   PetscBool          done;
606   PetscErrorCode     ierr;
607 
608   PetscFunctionBegin;
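  /*
     Added description of the overall flow: the rows requested in irows may be owned by other
     ranks, so a first PetscSF (roots = locally owned rows, leaves = requested rows) broadcasts
     each row's length and starting offset to the requesters; a second PetscSF addressing the
     individual nonzeros then broadcasts the column indices (and the edge weights in a->values,
     if present).  Finally only the columns listed in icols are kept, and the result is
     compressed into freshly allocated CSR arrays sxadj/sadjncy/svalues.
  */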
609   /* communicator */
610   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
611   /* Layouts */
612   ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
613   /* get rows information */
614   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
615   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
616   ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
617   /* construct sf graph*/
618   nleaves = nlrows_is;
619   for (i=0; i<nlrows_is; i++) {
620     owner = -1;
621     rlocalindex = -1;
622     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
623     iremote[i].rank  = owner;
624     iremote[i].index = rlocalindex;
625   }
626   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
627   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
628   nroots = nlrows_mat;
629   for (i=0; i<nlrows_mat; i++) {
630     ncols_send[i] = xadj[i+1]-xadj[i];
631   }
632   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
633   ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
634   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
635   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
636   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
637   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
638   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
639   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
640   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
641   Ncols_recv = 0;
642   for (i=0; i<nlrows_is; i++) {
643     Ncols_recv             += ncols_recv[i];
644     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
645   }
646   Ncols_send = 0;
647   for (i=0; i<nlrows_mat; i++) {
648     Ncols_send += ncols_send[i];
649   }
650   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
651   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
652   nleaves = Ncols_recv;
653   Ncols_recv = 0;
654   for (i=0; i<nlrows_is; i++) {
655     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
656     for (j=0; j<ncols_recv[i]; j++) {
657       iremote[Ncols_recv].rank    = owner;
658       iremote[Ncols_recv++].index = xadj_recv[i]+j;
659     }
660   }
661   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
662   /* decide whether edge weights need to be communicated; this assumes every rank agrees on whether a->values is set */
663   if (a->values) {isvalue = 1;} else {isvalue = 0;}
664   /* a global reduction would remove that assumption at the cost of a collective call, e.g.: */
665   /*ierr = MPI_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
666   if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
667   nroots = Ncols_send;
668   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
669   ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
670   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
671   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
672   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
673   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
674   if (isvalue) {
675     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
676     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
677   }
678   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
679   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
680   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
681   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
682   rnclos = 0;
683   for (i=0; i<nlrows_is; i++) {
684     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
685       ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
686       if (loc<0) {
687         adjncy_recv[j] = -1;
688         if (isvalue) values_recv[j] = -1;
689         ncols_recv[i]--;
690       } else {
691         rnclos++;
692       }
693     }
694   }
695   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
696   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
697   if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
698   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
699   rnclos = 0;
700   for (i=0; i<nlrows_is; i++) {
701     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
702       if (adjncy_recv[j]<0) continue;
703       sadjncy[rnclos] = adjncy_recv[j];
704       if (isvalue) svalues[rnclos] = values_recv[j];
705       rnclos++;
706     }
707   }
708   for (i=0; i<nlrows_is; i++) {
709     sxadj[i+1] = sxadj[i]+ncols_recv[i];
710   }
711   if (sadj_xadj)   {*sadj_xadj = sxadj;}     else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
712   if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
713   if (sadj_values) {
714     if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
715   } else {
716     if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
717   }
718   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
719   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
720   if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
721   PetscFunctionReturn(0);
722 }
723 
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
727 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
728 {
729   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
730   PetscErrorCode ierr;
731   const PetscInt *ranges;
732   MPI_Comm       acomm,bcomm;
733   MPI_Group      agroup,bgroup;
734   PetscMPIInt    i,rank,size,nranks,*ranks;
735 
736   PetscFunctionBegin;
737   *B    = NULL;
738   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
739   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
740   ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
741   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
742   for (i=0,nranks=0; i<size; i++) {
743     if (ranges[i+1] - ranges[i] > 0) nranks++;
744   }
745   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
746     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
747     *B   = A;
748     PetscFunctionReturn(0);
749   }
750 
751   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
752   for (i=0,nranks=0; i<size; i++) {
753     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
754   }
755   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
756   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
757   ierr = PetscFree(ranks);CHKERRQ(ierr);
758   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
759   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
760   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
761   if (bcomm != MPI_COMM_NULL) {
762     PetscInt   m,N;
763     Mat_MPIAdj *b;
764     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
765     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
766     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
767     b          = (Mat_MPIAdj*)(*B)->data;
768     b->freeaij = PETSC_FALSE;
769     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
770   }
771   PetscFunctionReturn(0);
772 }
773 
774 #undef __FUNCT__
775 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
776 /*@
777    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
778 
779    Collective
780 
781    Input Arguments:
782 .  A - original MPIAdj matrix
783 
784    Output Arguments:
785 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
786 
787    Level: developer
788 
789    Note:
790    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
791 
792    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
793 
794 .seealso: MatCreateMPIAdj()
795 @*/
796 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
797 {
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
802   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
803   PetscFunctionReturn(0);
804 }
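
/*
   Example usage (an illustrative sketch, not part of this file): shrink an MPIAdj matrix
   onto the ranks that actually own rows before handing it to a partitioner that cannot
   tolerate empty ranks.  B shares the i/j/values arrays of A, so B must be destroyed
   before A.  Here "part" is assumed to be an existing MatPartitioning object, and the
   call to MatPartitioningSetAdjacency() is only one possible use of B.

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {
       ierr = MatPartitioningSetAdjacency(part,B);CHKERRQ(ierr);
       ierr = MatDestroy(&B);CHKERRQ(ierr);
     }
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/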
805 
806 /*MC
807    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
808    intended for use in constructing orderings and partitionings.
809 
810   Level: beginner
811 
812 .seealso: MatCreateMPIAdj()
813 M*/
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "MatCreate_MPIAdj"
817 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
818 {
819   Mat_MPIAdj     *b;
820   PetscErrorCode ierr;
821 
822   PetscFunctionBegin;
823   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
824   B->data      = (void*)b;
825   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
826   B->assembled = PETSC_FALSE;
827 
828   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
829   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
830   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
831   PetscFunctionReturn(0);
832 }
833 
834 #undef __FUNCT__
835 #define __FUNCT__ "MatMPIAdjSetPreallocation"
836 /*@C
837    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights
838 
839    Logically Collective on MPI_Comm
840 
841    Input Parameters:
842 +  B - the matrix
843 .  i - the indices into j for the start of each row
844 .  j - the column indices for each row (sorted for each row).
845        The indices in i and j start with zero (NOT with one).
846 -  values - [optional] edge weights
847 
848    Level: intermediate
849 
850 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
851 @*/
852 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
853 {
854   PetscErrorCode ierr;
855 
856   PetscFunctionBegin;
857   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
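
/*
   Example usage (a minimal sketch): the MatCreate()/MatSetType() path, equivalent to
   calling MatCreateMPIAdj() below.  comm, m (local rows), and N (global columns) are
   assumed to be defined by the caller; i and j must be PetscMalloc()'d, zero based,
   sorted within each row, free of diagonal entries, and are freed by PETSc when the
   matrix is destroyed.

     Mat      B;
     PetscInt *i,*j;

     ...fill i (length m+1) and j (length i[m]) as described above...
     ierr = MatCreate(comm,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(B,i,j,NULL);CHKERRQ(ierr);
*/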
860 
861 #undef __FUNCT__
862 #define __FUNCT__ "MatCreateMPIAdj"
863 /*@C
864    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
865    The matrix does not have numerical values associated with it, but is
866    intended for ordering (to reduce bandwidth etc) and partitioning.
867 
868    Collective on MPI_Comm
869 
870    Input Parameters:
871 +  comm - MPI communicator
872 .  m - number of local rows
873 .  N - number of global columns
874 .  i - the indices into j for the start of each row
875 .  j - the column indices for each row (sorted for each row).
876        The indices in i and j start with zero (NOT with one).
877 -  values -[optional] edge weights
878 
879    Output Parameter:
880 .  A - the matrix
881 
882    Level: intermediate
883 
884    Notes: This matrix object does not support most matrix operations, including
885    MatSetValues().
886    You must NOT free the i, j, and values arrays yourself; PETSc will free them
887    when the matrix is destroyed. You must allocate them with PetscMalloc(). (If you
888    call from Fortran you do not need to allocate them with PetscMalloc().)
889    The arrays should not include the matrix diagonal entries.
890 
891    If you already have a matrix, you can create its adjacency matrix by a call
892    to MatConvert, specifying a type of MATMPIADJ.
893 
894    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
895 
896 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
897 @*/
898 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   ierr = MatCreate(comm,A);CHKERRQ(ierr);
904   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
905   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
906   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
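
/*
   Example usage (an illustrative sketch, not part of this file): each process owns one row
   of a path graph on "size" vertices, with neighbours rank-1 and rank+1 when they exist.
   The i/j arrays are PetscMalloc()'d by the caller and ownership passes to the matrix,
   which frees them in MatDestroy().  BuildPathAdj is a hypothetical helper name.

     PetscErrorCode BuildPathAdj(MPI_Comm comm,Mat *A)
     {
       PetscErrorCode ierr;
       PetscMPIInt    rank,size;
       PetscInt       *ia,*ja,nz = 0;

       PetscFunctionBegin;
       ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
       ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
       ierr = PetscMalloc1(2,&ia);CHKERRQ(ierr);
       ierr = PetscMalloc1(2,&ja);CHKERRQ(ierr);
       if (rank > 0)      ja[nz++] = rank-1;
       if (rank < size-1) ja[nz++] = rank+1;
       ia[0] = 0; ia[1] = nz;
       ierr = MatCreateMPIAdj(comm,1,size,ia,ja,NULL,A);CHKERRQ(ierr);
       PetscFunctionReturn(0);
     }

   Alternatively, an existing assembled matrix can be converted with
   MatConvert(X,MATMPIADJ,MAT_INITIAL_MATRIX,&Adj), as noted above.
*/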
909 