
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices differ if the dimensions or the numbers of nonzeros are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check whether the a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* check whether the a->j are the same */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inia[] argument");
    if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatGetSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range\n",j[ii],ii);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat,IS,IS, PetscInt **,PetscInt **,PetscInt **);
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat,PetscInt,const IS[],const IS[],PetscBool,MatReuse,Mat **);

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatricesMPI_MPIAdj"
PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;
  /* get sub-matrices across a sub-communicator */
  PetscFunctionBegin;
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj_Private"
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* construct a submat */
  for (i=0; i<n; i++) {
    /* communicators */
    if (subcomm) {
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = 0; sadjncy = 0; svalues = 0;
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find col %D \n",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the col index set\n");
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * Extract the rows of adj listed in irows, keeping only the columns that appear in icols, and
 * return the result in CSR-like arrays (xadj, adjncy, values). The rows are fetched from their
 * owning processes with two PetscSF broadcasts: one for the row lengths and offsets, and one for
 * the column indices (plus edge weights, if present). Columns not contained in icols are dropped;
 * renumbering to local indices is left to the caller. The interface is meant to be usable both for
 * MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices).
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct sf graph */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* determine whether edge weights are present locally; making this decision consistent
     across all processes would require a global reduction such as the one commented out below */
  if (a->values) {isvalue = 1;} else {isvalue = 0;}
  /*ierr = MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   { *sadj_xadj = sxadj;}     else { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) { *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

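   Example usage (a minimal sketch; B is only set on the ranks that own rows of A):
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {                                  /* this rank owns at least one row of A  */
     ierr = MatView(B,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)B)));CHKERRQ(ierr);
     ierr = MatDestroy(&B);CHKERRQ(ierr);    /* destroy B before A; they share arrays */
   }
   ierr = MatDestroy(&A);CHKERRQ(ierr);
.ve
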
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

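  A matrix of this type is usually built with MatCreateMPIAdj() or, from an existing matrix,
  with MatConvert() (a minimal sketch; adj is a hypothetical name for the output matrix):
.vb
   Mat adj;
   ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
.ve
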
.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the adjacency structure and optional edge weights

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

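   Example usage (a minimal sketch, equivalent to calling MatCreateMPIAdj(); m, N and the
   arrays i, j are assumed to have been set up by the caller, with i and j allocated by
   PetscMalloc()):
.vb
   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
   ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
   ierr = MatMPIAdjSetPreallocation(B,i,j,NULL);CHKERRQ(ierr);
.ve
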
.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The i and j arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

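   Example usage (a minimal sketch on a single process; the 4-vertex ring graph and the
   numbers below are purely illustrative):
.vb
   Mat            A;
   PetscInt       *ii,*jj;
   PetscErrorCode ierr;

   ierr  = PetscMalloc1(5,&ii);CHKERRQ(ierr);
   ierr  = PetscMalloc1(8,&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;  /* start of each row in jj */
   jj[0] = 1; jj[1] = 3;                                   /* neighbors of vertex 0   */
   jj[2] = 0; jj[3] = 2;                                   /* neighbors of vertex 1   */
   jj[4] = 1; jj[5] = 3;                                   /* neighbors of vertex 2   */
   jj[6] = 0; jj[7] = 2;                                   /* neighbors of vertex 3   */
   ierr  = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,NULL,&A);CHKERRQ(ierr);
   /* ii and jj now belong to A and are freed when A is destroyed */
   ierr  = MatDestroy(&A);CHKERRQ(ierr);
.ve
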
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}