/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
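/*
   For illustration only (not produced by this file): with the default ASCII format,
   a 3-vertex ring graph stored on one process would print roughly as

     row 0: 1  2
     row 1: 0  2
     row 2: 0  1

   one line per locally owned row, listing the global column (neighbor) indices.
*/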

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}
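/*
   Illustrative sketch of what MatMarkDiagonal_MPIAdj computes: for the local CSR
   structure i = {0,2,5}, j = {0,1, 0,1,2} (two rows), a->diag is filled as
   diag[0] = 0 and diag[1] = 3, i.e. the positions in j where the column index
   equals the local row index. Rows with no diagonal entry leave their diag[]
   slot uninitialized.
*/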

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    /* guard against matrices created without edge weights (a->values == NULL) */
    for (j=0; j<*nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices differ if the local dimensions or numbers of nonzeros are not equal */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* compare the row offsets a->i only if still equal so far */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  /* likewise for the column indices a->j */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
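/*
   An example of the oshift convention used above: the two-row graph stored
   zero-based as

     ia = {0,2,5}   ja = {0,1, 0,1,2}

   becomes, after MatGetRowIJ(...,oshift=1,...), the one-based arrays

     ia = {1,3,6}   ja = {1,2, 1,2,3}

   which is the indexing expected by Fortran-style partitioners. The shift is
   applied in place to the matrix's own arrays, so it must be undone by the
   matching MatRestoreRowIJ() call below before the matrix is used again.
*/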

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros; ia needs one entry per local row plus one */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
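
/*
   A minimal usage sketch of the conversion implemented above (assumes an
   assembled square matrix A of type MATAIJ; error checking abbreviated):
   obtaining the adjacency graph of A, e.g. for partitioning.

     Mat B;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     ...                                    // e.g. hand B to a MatPartitioning
     ierr = MatDestroy(&B);CHKERRQ(ierr);
*/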

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<=B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS rows,IS cols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values);

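/*
   MatGetSubMatrices_MPIAdj extracts n sequential MATMPIADJ submatrices, one per
   (irow[i],icol[i]) pair: MatGetSubMatrix_MPIAdj_data() gathers the selected rows
   in global numbering, and the loop below then renumbers each retained column into
   the submatrix's local numbering by binary search in the sorted, deduplicated
   union of the requested row and column indices.
*/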
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscInt        i,irow_localsize,icol_localsize,*sxadj,*sadjncy,*svalues;
  PetscInt       *indices,nindices,j,k,loc;
  const PetscInt *irow_indices,*icol_indices;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* size the scratch array to the largest row+column index set */
  nindices = 0;
  for (i=0; i<n; i++) {
    ierr = ISGetLocalSize(irow[i],&irow_localsize);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_localsize);CHKERRQ(ierr);
    nindices = PetscMax(nindices,irow_localsize+icol_localsize);
  }
  ierr = PetscCalloc1(nindices,&indices);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_localsize);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_localsize);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_localsize);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_localsize,icol_indices,sizeof(PetscInt)*icol_localsize);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindices = irow_localsize+icol_localsize;
    ierr = PetscSortRemoveDupsInt(&nindices,indices);CHKERRQ(ierr);
    /* renumber the retained global columns into the submatrix's local numbering */
    for (j=0; j<irow_localsize; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindices,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,irow_localsize,icol_localsize,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_localsize+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_localsize]);CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_localsize]);CHKERRQ(ierr);
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      ierr = PetscFree(svalues);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
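/*
   MatGetSubMatrix_MPIAdj_data gathers the CSR data of the rows listed in 'rows'
   (given in global numbering, possibly owned by other processes) in two PetscSF
   rounds: the first broadcasts each requested row's length and offset from its
   owner; the second broadcasts the column indices and values themselves. Columns
   not listed in 'cols' are then filtered out before the compacted CSR arrays are
   returned. Note this assumes edge weights are present (a->values non-NULL).
*/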
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS rows,IS cols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt        rows_localsize,cols_localsize,i,j,nroots,nleaves,owner,localindex,*ncols_send,*ncols_recv;
  PetscInt        nlocalrows,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt       *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt *rows_indices,*cols_indices,*xadj,*adjncy;
  Mat_MPIAdj     *a = (Mat_MPIAdj*)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(rows,&rows_localsize);CHKERRQ(ierr);
  ierr = ISGetIndices(rows,&rows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rows_localsize,&iremote);CHKERRQ(ierr);
  /* one leaf per requested row, rooted at the owning process */
  nleaves = rows_localsize;
  for (i=0; i<rows_localsize; i++) {
    ierr = PetscLayoutFindOwnerIndex(rmap,rows_indices[i],&owner,&localindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = localindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlocalrows,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlocalrows,&ncols_send,rows_localsize,&xadj_recv,rows_localsize+1,&ncols_recv_offsets,rows_localsize,&ncols_recv);CHKERRQ(ierr);
  nroots = nlocalrows;
  for (i=0; i<nlocalrows; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* first round: fetch each requested row's length and offset from its owner */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<rows_localsize; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlocalrows; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc2(Ncols_recv,&adjncy_recv,Ncols_recv,&values_recv);CHKERRQ(ierr);
  /* second round: one leaf per incoming nonzero, rooted at its position in the owner's adjncy */
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<rows_localsize; i++) {
    ierr = PetscLayoutFindOwner(rmap,rows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(rows,&rows_indices);CHKERRQ(ierr);
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlocalrows,&xadj,&adjncy,&done);CHKERRQ(ierr);
  /* drop columns that are not in the requested column set */
  ierr = ISGetLocalSize(cols,&cols_localsize);CHKERRQ(ierr);
  ierr = ISGetIndices(cols,&cols_indices);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<rows_localsize; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],cols_localsize,cols_indices,&loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(cols,&cols_indices);CHKERRQ(ierr);
  /* compact the retained entries into fresh CSR arrays */
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);
  ierr = PetscCalloc1(rows_localsize+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<rows_localsize; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<rows_localsize; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   { *sadj_xadj   = sxadj;   } else { ierr = PetscFree(sxadj);CHKERRQ(ierr);   }
  if (sadj_adjncy) { *sadj_adjncy = sadjncy; } else { ierr = PetscFree(sadjncy);CHKERRQ(ierr); }
  if (sadj_values) { *sadj_values = svalues; } else { ierr = PetscFree(svalues);CHKERRQ(ierr); }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree2(adjncy_recv,values_recv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
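
/*
   A usage sketch (error checking abbreviated): partitioning packages that cannot
   tolerate empty ranks can be driven through the subcommunicator matrix.

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {                       // only ranks owning rows of A get a matrix
       ...                          // partition B here
       ierr = MatDestroy(&B);CHKERRQ(ierr);   // destroy B before A
     }
*/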

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
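
/*
   Illustrative sketch of the expected arrays (error checking abbreviated): a
   single process holding the 3-vertex ring graph, without diagonal entries.
   The arrays must be obtained with PetscMalloc() since the matrix takes
   ownership and frees them on destroy.

     PetscInt *ii,*jj;
     ierr = PetscMalloc1(4,&ii);CHKERRQ(ierr);   // nrows+1 row offsets
     ierr = PetscMalloc1(6,&jj);CHKERRQ(ierr);   // 6 nonzeros (directed edges)
     ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
     jj[0] = 1; jj[1] = 2;                       // neighbors of row 0
     jj[2] = 0; jj[3] = 2;                       // neighbors of row 1
     jj[4] = 0; jj[5] = 1;                       // neighbors of row 2
     ierr = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
*/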

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
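
/*
   A minimal creation sketch on one process (error checking abbreviated),
   equivalent to the MatCreate()/MatSetType()/MatMPIAdjSetPreallocation()
   sequence above:

     Mat       A;
     PetscInt *ii,*jj;
     ierr = PetscMalloc1(3,&ii);CHKERRQ(ierr);
     ierr = PetscMalloc1(2,&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 1; ii[2] = 2;          // two rows, one edge each
     jj[0] = 1; jj[1] = 0;                     // edge 0 <-> 1
     ierr = MatCreateMPIAdj(PETSC_COMM_SELF,2,2,ii,jj,NULL,&A);CHKERRQ(ierr);
     ierr = MatDestroy(&A);CHKERRQ(ierr);      // also frees ii and jj
*/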