/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* the i and j arrays were obtained with plain malloc(), so release them with free() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      /* grow the scratch array geometrically so repeated calls do not reallocate every time */
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    for (j=0; j<*nz; j++) a->rowvalues[j] = a->values[a->i[row]+j];
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the local dimensions or the numbers of nonzeros differ, the matrices are not equal */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* compare the row offsets a->i, and only then the column indices a->j */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);     /* one entry per local row plus the terminating offset */
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values);

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  const PetscInt    *irow_indices,*icol_indices;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* the scratch array must be able to hold the largest row plus column index set */
  nindx = 0;
  for (i=0; i<n; i++) {
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = PetscMax(nindx,irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat        sadj = *(submat[i]);
      Mat_MPIAdj *sa  = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * The interface is intended to be usable for both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices)
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,PETSC_NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct the SF graph: one leaf per requested row, rooted at the owning process */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* fetch the number of columns and the row offset of each requested row */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  /* a second SF: one leaf per incoming column entry */
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* check whether we need to deal with edge weights */
  if (a->values) isvalue = 1; else isvalue = 0;
  /* the following check would involve a global communication
     ierr = MPI_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); */
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  /* mark columns that are not in icols and count the entries that remain */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  /* compress the surviving entries into the output CSR arrays */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj = sxadj;}     else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (isvalue) *sadj_values = svalues; else *sadj_values = NULL;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;     /* the new matrix borrows the arrays of A, so it must not free them */
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

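   Example usage (a minimal sketch based on the notes above; the partitioning step is only indicated):
.vb
   Mat B;
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {                                 /* this rank owns at least one row of A */
     /* ... hand B to a partitioner that requires nonempty ranks ... */
     ierr = MatDestroy(&B);CHKERRQ(ierr);   /* destroy B before A, since the arrays are shared */
   }
   ierr = MatDestroy(&A);CHKERRQ(ierr);
.ve
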
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

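   An existing matrix can be converted to this type to obtain its adjacency structure; a minimal sketch, given an assembled matrix A (the result adj holds the nonzero structure of A without its diagonal):
.vb
   Mat adj;
   ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
.ve
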
  Level: beginner

.seealso: MatCreateMPIAdj
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

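   Example (a minimal sketch of the long form of MatCreateMPIAdj(), assuming comm, m, N, and PetscMalloc()'d CSR arrays ia and ja as in the MatCreateMPIAdj() example):
.vb
   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
   ierr = MatSetType(A,MATMPIADJ);CHKERRQ(ierr);
   ierr = MatMPIAdjSetPreallocation(A,ia,ja,NULL);CHKERRQ(ierr);
.ve
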
805 @*/
806 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
807 {
808   PetscErrorCode ierr;
809 
810   PetscFunctionBegin;
811   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
812   PetscFunctionReturn(0);
813 }
814 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

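   Example (a minimal sketch on two MPI processes for the path graph 0-1-2-3, each process owning two rows; the CSR arrays are allocated with PetscMalloc1() because the matrix takes ownership of them, and NULL is passed since no edge weights are needed):
.vb
   Mat         A;
   PetscInt    *ia,*ja;
   PetscMPIInt rank;
   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
   ierr = PetscMalloc1(3,&ia);CHKERRQ(ierr);    /* 2 local rows + 1 */
   ierr = PetscMalloc1(3,&ja);CHKERRQ(ierr);    /* 3 local edge endpoints */
   if (!rank) {      /* rows 0 and 1: neighbors of 0 are {1}, of 1 are {0,2} */
     ia[0] = 0; ia[1] = 1; ia[2] = 3;
     ja[0] = 1; ja[1] = 0; ja[2] = 2;
   } else {          /* rows 2 and 3: neighbors of 2 are {1,3}, of 3 are {2} */
     ia[0] = 0; ia[1] = 2; ia[2] = 3;
     ja[0] = 1; ja[1] = 3; ja[2] = 2;
   }
   ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&A);CHKERRQ(ierr);
.ve
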
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}