/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

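/*
   Illustrative sketch (not part of the library): how a caller might traverse the rows of an
   assembled MATMPIADJ matrix through the generic MatGetRow()/MatRestoreRow() interface that the
   two routines above implement; a weight of 1.0 is reported for each edge when no values array
   was supplied. Assumes A is an assembled MATMPIADJ matrix and ierr is a PetscErrorCode in a
   program where PetscInitialize() has been called.

     PetscInt          row,rstart,rend,ncols,k;
     const PetscInt    *cols;
     const PetscScalar *vals;

     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       for (k=0; k<ncols; k++) {
         ierr = PetscPrintf(PETSC_COMM_SELF,"edge %D -> %D (weight %g)\n",row,cols[k],(double)PetscRealPart(vals[k]));CHKERRQ(ierr);
       }
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/
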
#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the local matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row pointers a->i agree */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the column indices a->j agree */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

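/*
   Illustrative sketch (not part of the library): obtaining the raw CSR arrays of a MATMPIADJ
   matrix with MatGetRowIJ(), here with oshift=1 so the indices come back 1-based, as a
   Fortran-indexed partitioner would expect. Because the shift is applied in place to the
   matrix's own arrays, MatRestoreRowIJ() must be called with the same oshift. Assumes A is an
   assembled MATMPIADJ matrix.

     PetscInt       nlocal;
     const PetscInt *ia,*ja;
     PetscBool      done;

     ierr = MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&nlocal,&ia,&ja,&done);CHKERRQ(ierr);
     if (done) {
       ... ia[] and ja[] are now 1-based views of the local adjacency structure ...
     }
     ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&nlocal,&ia,&ja,&done);CHKERRQ(ierr);
*/
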
#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

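/*
   Illustrative sketch (not part of the library): the usual way to reach MatConvertFrom_MPIAdj()
   is through MatConvert(), which builds the adjacency structure of an existing matrix; as the
   routine above shows, the diagonal is dropped and |value| is kept as an integer edge weight.
   Assumes A is an assembled matrix, e.g. of type MATMPIAIJ.

     Mat adj;

     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ...   use adj, e.g. hand it to a MatPartitioning or MatGetOrdering() ...
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/
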
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatGetSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range\n",j[ii],ii);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat,IS,IS,PetscInt**,PetscInt**,PetscInt**);
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat,PetscInt,const IS[],const IS[],PetscBool,MatReuse,Mat**);

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatricesMPI_MPIAdj"
PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get sub-matrices across a sub-communicator */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

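/*
   Illustrative sketch (not part of the library): extracting a sequential sub-adjacency from a
   MATMPIADJ matrix through MatGetSubMatrices(), which dispatches to the routine above. Each
   process asks for the rows and columns listed in its own index sets; the implementation looks
   columns up with PetscFindInt(), so the column index set should be sorted. Assumes A is an
   assembled MATMPIADJ matrix and rstart/rend come from MatGetOwnershipRange().

     IS  isrow,iscol;
     Mat *submats;

     ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrices(A,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&submats);CHKERRQ(ierr);
     ...   submats[0] is an adjacency matrix on PETSC_COMM_SELF holding the requested block ...
     ierr = MatDestroyMatrices(1,&submats);CHKERRQ(ierr);
     ierr = ISDestroy(&isrow);CHKERRQ(ierr);
     ierr = ISDestroy(&iscol);CHKERRQ(ierr);
*/
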
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj_Private"
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt        i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt       *indices,nindx,j,k,loc;
  PetscMPIInt     issame;
  const PetscInt *irow_indices,*icol_indices;
  MPI_Comm        scomm_row,scomm_col,scomm_mat;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number of indices for allocating memory
   */
  for (i=0; i<n; i++) {
    ierr  = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr  = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct a submatrix for each pair of index sets */
  for (i=0; i<n; i++) {
    /* communicators */
    if (subcomm) {
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same comm as the col index set\n");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = 0; sadjncy = 0; svalues = 0;
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns into the local numbering of the submatrix */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %D\n",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same comm as the row index set\n");
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * The interface should be easy to use for both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices)
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct sf graph: leaves are the requested rows, roots are the locally owned rows */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* do we need to deal with edge weights? */
  if (a->values) {isvalue=1;} else {isvalue=0;}
  /* this would involve a global communication */
  /* ierr = MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); */
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj) {*sadj_xadj = sxadj;} else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

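/*
   Illustrative sketch (not part of the library) of the PetscSF pattern used twice in the routine
   above: roots live on the processes that own the data, leaves describe the local requests, and
   a broadcast moves root data to the leaves. The names below (remote, rootdata, leafdata,
   nroots, nleaves) are made up for the example.

     PetscSF     sf;
     PetscSFNode *remote;               one entry per leaf: (owning rank, index on that rank)
     PetscInt    *rootdata,*leafdata;   rootdata[] is owned locally, leafdata[] gets filled

     ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
     ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
     ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
     ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/
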
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

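/*
   Illustrative sketch (not part of the library): typical use of
   MatMPIAdjCreateNonemptySubcommMat() by a caller that must hand every participating process at
   least one row, e.g. before invoking an external partitioner. Assumes A is an assembled
   MATMPIADJ matrix; B is NULL on the ranks that own no rows of A.

     Mat B;

     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {
       ...   work with B on the smaller communicator ...
       ierr = MatDestroy(&B);CHKERRQ(ierr);   destroy B before A, since the arrays are shared
     }
*/
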
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

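/*
   Illustrative sketch (not part of the library): the most common consumer of a MATMPIADJ matrix
   is MatPartitioning. Assumes adj is an assembled MATMPIADJ matrix describing the mesh or graph
   to be partitioned.

     MatPartitioning part;
     IS              is;

     ierr = MatPartitioningCreate(PetscObjectComm((PetscObject)adj),&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ...   the r-th local entry of is gives the target process of local row r ...
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
*/
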
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

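/*
   Illustrative sketch (not part of the library): building the CSR-style arrays for a 4-vertex
   cycle graph (0-1-2-3-0) on two processes and attaching them with MatMPIAdjSetPreallocation().
   Each process owns two rows; the arrays must be obtained with PetscMalloc() because the matrix
   takes ownership of them and frees them at MatDestroy().

     Mat      B;
     PetscInt *ii,*jj;

     ierr = PetscMalloc1(3,&ii);CHKERRQ(ierr);        m+1 row pointers
     ierr = PetscMalloc1(4,&jj);CHKERRQ(ierr);        total nonzeros on this process
     ii[0] = 0; ii[1] = 2; ii[2] = 4;
     By symmetry of the cycle, the local column arrays happen to be identical on both
     processes: rank 0 holds rows 0,1 and rank 1 holds rows 2,3.
     jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2;
     ierr = MatCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,2,PETSC_DETERMINE,PETSC_DETERMINE,4);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
*/
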
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
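
/*
   Illustrative sketch (not part of the library): a minimal program fragment that creates the
   adjacency matrix of a global path graph 0-1-2-...-(N-1) with MatCreateMPIAdj(), each process
   contributing m consecutive rows. The names m, N, rstart, rend are assumptions of the example;
   PetscInitialize() is assumed to have been called.

     #include <petscmat.h>
     ...
     Mat         A;
     PetscInt    m = 3,N,rstart,rend,*ii,*jj,nz,r,cnt;
     PetscMPIInt rank,size;

     ierr   = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
     ierr   = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
     N      = m*size;
     rstart = m*rank;
     rend   = rstart+m;
     nz     = 0;
     for (r=rstart; r<rend; r++) nz += (r>0) + (r<N-1);
     ierr  = PetscMalloc1(m+1,&ii);CHKERRQ(ierr);
     ierr  = PetscMalloc1(nz,&jj);CHKERRQ(ierr);
     ii[0] = 0; cnt = 0;
     for (r=rstart; r<rend; r++) {
       if (r>0)   jj[cnt++] = r-1;
       if (r<N-1) jj[cnt++] = r+1;
       ii[r-rstart+1] = cnt;
     }
     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,m,N,ii,jj,NULL,&A);CHKERRQ(ierr);
     ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/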