xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 5edf65166e22df6feda2ed6bd8a6e348419dee17)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     if (a->freeaijwithfree) {
72       if (a->i) free(a->i);
73       if (a->j) free(a->j);
74     } else {
75       ierr = PetscFree(a->i);CHKERRQ(ierr);
76       ierr = PetscFree(a->j);CHKERRQ(ierr);
77       ierr = PetscFree(a->values);CHKERRQ(ierr);
78     }
79   }
80   ierr = PetscFree(mat->data);CHKERRQ(ierr);
81   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
83   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatSetOption_MPIAdj"
89 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
90 {
91   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
92   PetscErrorCode ierr;
93 
94   PetscFunctionBegin;
95   switch (op) {
96   case MAT_SYMMETRIC:
97   case MAT_STRUCTURALLY_SYMMETRIC:
98   case MAT_HERMITIAN:
99     a->symmetric = flg;
100     break;
101   case MAT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap->n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap->n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap->rstart;
146 
147   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   Nothing to release: MatGetRow_MPIAdj() hands out pointers into the
   matrix's own storage without allocating, so restore is a no-op.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscBool      flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
196 {
197   PetscInt       i;
198   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
199   PetscInt       **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
200 
201   PetscFunctionBegin;
202   *m    = A->rmap->n;
203   *ia   = a->i;
204   *ja   = a->j;
205   *done = PETSC_TRUE;
206   if (oshift) {
207     for (i=0; i<(*ia)[*m]; i++) {
208       (*ja)[i]++;
209     }
210     for (i=0; i<=(*m); i++) (*ia)[i]++;
211   }
212   PetscFunctionReturn(0);
213 }
214 
215 #undef __FUNCT__
216 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
217 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
218 {
219   PetscInt   i;
220   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
221   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
222 
223   PetscFunctionBegin;
224   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
225   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
226   if (oshift) {
227     for (i=0; i<=(*m); i++) (*ia)[i]--;
228     for (i=0; i<(*ia)[*m]; i++) {
229       (*ja)[i]--;
230     }
231   }
232   PetscFunctionReturn(0);
233 }
234 
235 #undef __FUNCT__
236 #define __FUNCT__ "MatConvertFrom_MPIAdj"
237 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
238 {
239   Mat               B;
240   PetscErrorCode    ierr;
241   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
242   const PetscInt    *rj;
243   const PetscScalar *ra;
244   MPI_Comm          comm;
245 
246   PetscFunctionBegin;
247   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
248   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
249   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
250 
251   /* count the number of nonzeros per row */
252   for (i=0; i<m; i++) {
253     ierr   = MatGetRow(A,i+rstart,&len,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
254     ierr   = MatRestoreRow(A,i+rstart,&len,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
255     nzeros += len;
256   }
257 
258   /* malloc space for nonzeros */
259   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
260   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
261   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
262 
263   nzeros = 0;
264   ia[0]  = 0;
265   for (i=0; i<m; i++) {
266     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
267     cnt     = 0;
268     for (j=0; j<len; j++) {
269       a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
270       ja[nzeros+cnt++] = rj[j];
271     }
272     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
273     nzeros += cnt;
274     ia[i+1] = nzeros;
275   }
276 
277   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
278   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
279   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
280   ierr = MatSetType(B,type);CHKERRQ(ierr);
281   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
282 
283   if (reuse == MAT_REUSE_MATRIX) {
284     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
285   } else {
286     *newmat = B;
287   }
288   PetscFunctionReturn(0);
289 }
290 
291 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Only the handful of operations that
   make sense for a values-free adjacency structure are implemented
   (get/restore row, equality, set option, row IJ access, destroy, view,
   convert-from); every other slot is 0, i.e. not supported.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
390 
391 EXTERN_C_BEGIN
392 #undef __FUNCT__
393 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
394 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
395 {
396   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
397   PetscErrorCode ierr;
398 #if defined(PETSC_USE_DEBUG)
399   PetscInt       ii;
400 #endif
401 
402   PetscFunctionBegin;
403   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
404   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
405 
406 #if defined(PETSC_USE_DEBUG)
407   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
408   for (ii=1; ii<B->rmap->n; ii++) {
409     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
410   }
411   for (ii=0; ii<i[B->rmap->n]; ii++) {
412     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
413   }
414 #endif
415   B->preallocated = PETSC_TRUE;
416 
417   b->j      = j;
418   b->i      = i;
419   b->values = values;
420 
421   b->nz               = i[B->rmap->n];
422   b->diag             = 0;
423   b->symmetric        = PETSC_FALSE;
424   b->freeaij          = PETSC_TRUE;
425 
426   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
427   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
428   PetscFunctionReturn(0);
429 }
430 EXTERN_C_END
431 
432 #undef __FUNCT__
433 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
434 PETSC_EXTERN_C PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
435 {
436   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
437   PetscErrorCode ierr;
438   const PetscInt *ranges;
439   MPI_Comm       acomm,bcomm;
440   MPI_Group      agroup,bgroup;
441   PetscMPIInt    i,rank,size,nranks,*ranks;
442 
443   PetscFunctionBegin;
444   *B = PETSC_NULL;
445   acomm = ((PetscObject)A)->comm;
446   ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
447   ierr = MPI_Comm_size(acomm,&rank);CHKERRQ(ierr);
448   ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
449   for (i=0,nranks=0; i<size; i++) {
450     if (ranges[i+1] - ranges[i] > 0) nranks++;
451   }
452   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
453     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
454     *B = A;
455     PetscFunctionReturn(0);
456   }
457 
458   ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
459   for (i=0,nranks=0; i<size; i++) {
460     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
461   }
462   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
463   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
464   ierr = PetscFree(ranks);CHKERRQ(ierr);
465   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
466   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
467   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
468   if (bcomm != MPI_COMM_NULL) {
469     PetscInt   m,N;
470     Mat_MPIAdj *b;
471     ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
472     ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
473     ierr = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
474     b = (Mat_MPIAdj*)(*B)->data;
475     b->freeaij = PETSC_FALSE;
476     ierr = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
477   }
478   PetscFunctionReturn(0);
479 }
480 
481 #undef __FUNCT__
482 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
483 /*@
484    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
485 
486    Collective
487 
488    Input Arguments:
489 .  A - original MPIAdj matrix
490 
491    Output Arguments:
492 .  B - matrix on subcommunicator, PETSC_NULL on ranks that owned zero rows of A
493 
494    Level: developer
495 
496    Note:
497    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
498 
499    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
500 
501 .seealso: MatCreateMPIAdj()
502 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A;
     PetscUseMethod errors if A's type did not provide one */
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
512 
513 /*MC
514    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
515    intended for use constructing orderings and partitionings.
516 
517   Level: beginner
518 
519 .seealso: MatCreateMPIAdj
520 M*/
521 
522 EXTERN_C_BEGIN
523 #undef __FUNCT__
524 #define __FUNCT__ "MatCreate_MPIAdj"
525 PetscErrorCode  MatCreate_MPIAdj(Mat B)
526 {
527   Mat_MPIAdj     *b;
528   PetscErrorCode ierr;
529 
530   PetscFunctionBegin;
531   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
532   B->data             = (void*)b;
533   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
534   B->assembled        = PETSC_FALSE;
535 
536   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
537                                     "MatMPIAdjSetPreallocation_MPIAdj",
538                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
539   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
540                                     "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
541                                      MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
542   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
543   PetscFunctionReturn(0);
544 }
545 EXTERN_C_END
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "MatMPIAdjSetPreallocation"
549 /*@C
550    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
551 
552    Logically Collective on MPI_Comm
553 
554    Input Parameters:
555 +  A - the matrix
556 .  i - the indices into j for the start of each row
557 .  j - the column indices for each row (sorted for each row).
558        The indices in i and j start with zero (NOT with one).
559 -  values - [optional] edge weights
560 
561    Level: intermediate
562 
563 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
564 @*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* forwards to MatMPIAdjSetPreallocation_MPIAdj() when B is MATMPIADJ;
     PetscTryMethod silently does nothing for other matrix types */
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
573 
574 #undef __FUNCT__
575 #define __FUNCT__ "MatCreateMPIAdj"
576 /*@C
577    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
578    The matrix does not have numerical values associated with it, but is
579    intended for ordering (to reduce bandwidth etc) and partitioning.
580 
581    Collective on MPI_Comm
582 
583    Input Parameters:
584 +  comm - MPI communicator
585 .  m - number of local rows
586 .  N - number of global columns
587 .  i - the indices into j for the start of each row
588 .  j - the column indices for each row (sorted for each row).
589        The indices in i and j start with zero (NOT with one).
590 -  values -[optional] edge weights
591 
592    Output Parameter:
593 .  A - the matrix
594 
595    Level: intermediate
596 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
602    Should not include the matrix diagonals.
603 
604    If you already have a matrix, you can create its adjacency matrix by a call
605    to MatConvert, specifying a type of MATMPIADJ.
606 
607    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
608 
609 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
610 @*/
/*
   Convenience constructor: create + set sizes + set type MATMPIADJ +
   attach the caller's CSR arrays (ownership of i/j/values transfers to
   the matrix; see MatMPIAdjSetPreallocation()).
*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
622 
623 
624 
625 
626 
627 
628 
629