/* xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 670f3ff9a12f3667ff7d4cebfc50ebc8579c9e33) */
1 
2 /*
    Defines the basic matrix operations for the MPIAdj adjacency-list matrix data structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
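/*
   Illustrative usage (editor's sketch, not part of the original source): only ASCII viewers are
   supported by MatView_MPIAdj(), so a caller would typically print the adjacency structure with
   the standard ASCII viewer.  The matrix name "adj" is hypothetical.

     ierr = MatView(adj,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/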
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     if (a->freeaijwithfree) {
72       if (a->i) free(a->i);
73       if (a->j) free(a->j);
74     } else {
75       ierr = PetscFree(a->i);CHKERRQ(ierr);
76       ierr = PetscFree(a->j);CHKERRQ(ierr);
77       ierr = PetscFree(a->values);CHKERRQ(ierr);
78     }
79   }
80   ierr = PetscFree(mat->data);CHKERRQ(ierr);
81   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
83   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatSetOption_MPIAdj"
89 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
90 {
91   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
92   PetscErrorCode ierr;
93 
94   PetscFunctionBegin;
95   switch (op) {
96   case MAT_SYMMETRIC:
97   case MAT_STRUCTURALLY_SYMMETRIC:
98   case MAT_HERMITIAN:
99     a->symmetric = flg;
100     break;
101   case MAT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
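/*
   Illustrative usage (editor's sketch, not part of the original source): the only options acted on
   above are the symmetry flags, so a typical caller simply marks a symmetric adjacency graph before
   handing it to a partitioner.  The matrix name "adj" is hypothetical.

     ierr = MatSetOption(adj,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
*/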
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap->n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    a->diag[i] = -1;          /* default when row i has no diagonal entry */
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap->rstart;
146 
147   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "MatRestoreRow_MPIAdj"
163 PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
164 {
165   PetscFunctionBegin;
166   PetscFunctionReturn(0);
167 }
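/*
   Illustrative usage (editor's sketch, not part of the original source): MatGetRow() on a MATMPIADJ
   matrix returns only column indices; the value pointer comes back PETSC_NULL because the matrix
   stores no numerical entries.  The names "adj" and "row" are hypothetical.

     PetscInt       ncols;
     const PetscInt *cols;
     ierr = MatGetRow(adj,row,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
     // inspect cols[0..ncols-1] here
     ierr = MatRestoreRow(adj,row,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
*/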
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscBool      flag;
176 
177   PetscFunctionBegin;
  /* If the matrix dimensions or the numbers of nonzeros differ, the matrices cannot be equal */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* check that the row offsets a->i match */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  /* check that the column indices a->j match */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
196 {
197   PetscInt       i;
198   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
199 
200   PetscFunctionBegin;
201   *m    = A->rmap->n;
202   *ia   = a->i;
203   *ja   = a->j;
204   *done = PETSC_TRUE;
205   if (oshift) {
206     for (i=0; i<(*ia)[*m]; i++) {
207       (*ja)[i]++;
208     }
209     for (i=0; i<=(*m); i++) (*ia)[i]++;
210   }
211   PetscFunctionReturn(0);
212 }
213 
214 #undef __FUNCT__
215 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
216 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
217 {
218   PetscInt   i;
219   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
220 
221   PetscFunctionBegin;
222   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
223   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
224   if (oshift) {
225     for (i=0; i<=(*m); i++) (*ia)[i]--;
226     for (i=0; i<(*ia)[*m]; i++) {
227       (*ja)[i]--;
228     }
229   }
230   PetscFunctionReturn(0);
231 }
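/*
   Illustrative usage (editor's sketch, not part of the original source): MatGetRowIJ() and
   MatRestoreRowIJ() must be called as a pair; with oshift=1 the routines above shift the stored
   arrays to 1-based indexing in place and shift them back on restore, the pattern Fortran-indexed
   partitioning packages rely on.  The matrix name "adj" is hypothetical.

     PetscInt  n,*ia,*ja;
     PetscBool done;
     ierr = MatGetRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     // ia[] and ja[] are now 1-based; hand them to the external package here
     ierr = MatRestoreRowIJ(adj,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
*/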
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "MatConvertFrom_MPIAdj"
235 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
236 {
237   Mat               B;
238   PetscErrorCode    ierr;
239   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
240   const PetscInt    *rj;
241   const PetscScalar *ra;
242   MPI_Comm          comm;
243 
244   PetscFunctionBegin;
245   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
246   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
247   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
248 
249   /* count the number of nonzeros per row */
250   for (i=0; i<m; i++) {
251     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
252     for (j=0; j<len; j++) {
253       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
254     }
255     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
256     nzeros += len;
257   }
258 
259   /* malloc space for nonzeros */
260   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
262   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
263 
264   nzeros = 0;
265   ia[0]  = 0;
266   for (i=0; i<m; i++) {
267     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
268     cnt     = 0;
269     for (j=0; j<len; j++) {
270       if (rj[j] != i+rstart) { /* if not diagonal */
271         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
272         ja[nzeros+cnt++] = rj[j];
273       }
274     }
275     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
276     nzeros += cnt;
277     ia[i+1] = nzeros;
278   }
279 
280   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
281   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
282   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
283   ierr = MatSetType(B,type);CHKERRQ(ierr);
284   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
285 
286   if (reuse == MAT_REUSE_MATRIX) {
287     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
288   } else {
289     *newmat = B;
290   }
291   PetscFunctionReturn(0);
292 }
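/*
   Illustrative usage (editor's sketch, not part of the original source): callers normally reach
   MatConvertFrom_MPIAdj() through MatConvert(), for example to build the adjacency graph of an
   assembled AIJ matrix before partitioning.  The matrix names "A" and "adj" are hypothetical.

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     // use adj with MatPartitioning or MatGetOrdering, then
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/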
293 
294 /* -------------------------------------------------------------------*/
295 static struct _MatOps MatOps_Values = {0,
296        MatGetRow_MPIAdj,
297        MatRestoreRow_MPIAdj,
298        0,
299 /* 4*/ 0,
300        0,
301        0,
302        0,
303        0,
304        0,
305 /*10*/ 0,
306        0,
307        0,
308        0,
309        0,
310 /*15*/ 0,
311        MatEqual_MPIAdj,
312        0,
313        0,
314        0,
315 /*20*/ 0,
316        0,
317        MatSetOption_MPIAdj,
318        0,
319 /*24*/ 0,
320        0,
321        0,
322        0,
323        0,
324 /*29*/ 0,
325        0,
326        0,
327        0,
328        0,
329 /*34*/ 0,
330        0,
331        0,
332        0,
333        0,
334 /*39*/ 0,
335        0,
336        0,
337        0,
338        0,
339 /*44*/ 0,
340        0,
341        0,
342        0,
343        0,
344 /*49*/ 0,
345        MatGetRowIJ_MPIAdj,
346        MatRestoreRowIJ_MPIAdj,
347        0,
348        0,
349 /*54*/ 0,
350        0,
351        0,
352        0,
353        0,
354 /*59*/ 0,
355        MatDestroy_MPIAdj,
356        MatView_MPIAdj,
357        MatConvertFrom_MPIAdj,
358        0,
359 /*64*/ 0,
360        0,
361        0,
362        0,
363        0,
364 /*69*/ 0,
365        0,
366        0,
367        0,
368        0,
369 /*74*/ 0,
370        0,
371        0,
372        0,
373        0,
374 /*79*/ 0,
375        0,
376        0,
377        0,
378        0,
379 /*84*/ 0,
380        0,
381        0,
382        0,
383        0,
384 /*89*/ 0,
385        0,
386        0,
387        0,
388        0,
389 /*94*/ 0,
390        0,
391        0,
392        0};
393 
394 EXTERN_C_BEGIN
395 #undef __FUNCT__
396 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
397 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
398 {
399   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
400   PetscErrorCode ierr;
401 #if defined(PETSC_USE_DEBUG)
402   PetscInt       ii;
403 #endif
404 
405   PetscFunctionBegin;
406   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
407   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
408   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
409   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
410 
411 #if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
413   for (ii=1; ii<B->rmap->n; ii++) {
414     if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D is negative or less than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
416     }
417   }
418   for (ii=0; ii<i[B->rmap->n]; ii++) {
419     if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
421     }
422   }
423 #endif
424   B->preallocated = PETSC_TRUE;
425 
426   b->j      = j;
427   b->i      = i;
428   b->values = values;
429 
430   b->nz               = i[B->rmap->n];
431   b->diag             = 0;
432   b->symmetric        = PETSC_FALSE;
433   b->freeaij          = PETSC_TRUE;
434 
435   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
436   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
437   PetscFunctionReturn(0);
438 }
439 EXTERN_C_END
440 
441 #undef __FUNCT__
442 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
443 PETSC_EXTERN_C PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
444 {
445   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
446   PetscErrorCode ierr;
447   const PetscInt *ranges;
448   MPI_Comm       acomm,bcomm;
449   MPI_Group      agroup,bgroup;
450   PetscMPIInt    i,rank,size,nranks,*ranks;
451 
452   PetscFunctionBegin;
453   *B = PETSC_NULL;
454   acomm = ((PetscObject)A)->comm;
455   ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
457   ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
458   for (i=0,nranks=0; i<size; i++) {
459     if (ranges[i+1] - ranges[i] > 0) nranks++;
460   }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
462     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
463     *B = A;
464     PetscFunctionReturn(0);
465   }
466 
467   ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
468   for (i=0,nranks=0; i<size; i++) {
469     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
470   }
471   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
472   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
473   ierr = PetscFree(ranks);CHKERRQ(ierr);
474   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
475   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
476   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
477   if (bcomm != MPI_COMM_NULL) {
478     PetscInt   m,N;
479     Mat_MPIAdj *b;
480     ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
481     ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
482     ierr = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
483     b = (Mat_MPIAdj*)(*B)->data;
484     b->freeaij = PETSC_FALSE;
485     ierr = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
486   }
487   PetscFunctionReturn(0);
488 }
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
492 /*@
493    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
494 
495    Collective
496 
497    Input Arguments:
498 .  A - original MPIAdj matrix
499 
500    Output Arguments:
.  B - matrix on subcommunicator, PETSC_NULL on ranks that own zero rows of A
502 
503    Level: developer
504 
505    Note:
506    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
507 
508    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
509 
510 .seealso: MatCreateMPIAdj()
511 @*/
512 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
513 {
514   PetscErrorCode ierr;
515 
516   PetscFunctionBegin;
517   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
518   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
519   PetscFunctionReturn(0);
520 }
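/*
   Illustrative usage (editor's sketch, not part of the original source): a partitioning package
   that cannot handle empty ranks might call the routine above as follows; ranks owning no rows
   receive B == PETSC_NULL and simply skip the call into the package.  The matrix names are
   hypothetical.

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {
       // call the external partitioner on B's smaller communicator
       ierr = MatDestroy(&B);CHKERRQ(ierr);
     }
*/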
521 
522 /*MC
523    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.
525 
526   Level: beginner
527 
528 .seealso: MatCreateMPIAdj
529 M*/
530 
531 EXTERN_C_BEGIN
532 #undef __FUNCT__
533 #define __FUNCT__ "MatCreate_MPIAdj"
534 PetscErrorCode  MatCreate_MPIAdj(Mat B)
535 {
536   Mat_MPIAdj     *b;
537   PetscErrorCode ierr;
538 
539   PetscFunctionBegin;
540   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
541   B->data             = (void*)b;
542   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
543   B->assembled        = PETSC_FALSE;
544 
545   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
546                                     "MatMPIAdjSetPreallocation_MPIAdj",
547                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
548   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
549                                     "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
550                                      MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
551   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
552   PetscFunctionReturn(0);
553 }
554 EXTERN_C_END
555 
556 #undef __FUNCT__
557 #define __FUNCT__ "MatMPIAdjSetPreallocation"
558 /*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix adjacency structure and optional edge weights
560 
561    Logically Collective on MPI_Comm
562 
563    Input Parameters:
564 +  A - the matrix
565 .  i - the indices into j for the start of each row
566 .  j - the column indices for each row (sorted for each row).
567        The indices in i and j start with zero (NOT with one).
568 -  values - [optional] edge weights
569 
570    Level: intermediate
571 
572 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
573 @*/
574 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
575 {
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
580   PetscFunctionReturn(0);
581 }
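/*
   Illustrative usage (editor's sketch, not part of the original source): the i/j arrays follow the
   usual CSR convention with 0-based indices and no diagonal entries, and they must come from
   PetscMalloc() because the matrix takes ownership of them.  Assuming B was created with MatCreate(),
   MatSetSizes() and MatSetType(B,MATMPIADJ) and owns two local rows whose off-diagonal neighbours
   are {1,3} and {0,2}, the arrays would be

     PetscInt *ii,*jj;
     ierr = PetscMalloc(3*sizeof(PetscInt),&ii);CHKERRQ(ierr);
     ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 2; ii[2] = 4;
     jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2;
     ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);
*/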
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "MatCreateMPIAdj"
585 /*@C
586    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
587    The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth, etc.) and partitioning.
589 
590    Collective on MPI_Comm
591 
592    Input Parameters:
593 +  comm - MPI communicator
594 .  m - number of local rows
595 .  N - number of global columns
596 .  i - the indices into j for the start of each row
597 .  j - the column indices for each row (sorted for each row).
598        The indices in i and j start with zero (NOT with one).
599 -  values -[optional] edge weights
600 
601    Output Parameter:
602 .  A - the matrix
603 
604    Level: intermediate
605 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.
612 
613    If you already have a matrix, you can create its adjacency matrix by a call
614    to MatConvert, specifying a type of MATMPIADJ.
615 
616    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
617 
618 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
619 @*/
620 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
621 {
622   PetscErrorCode ierr;
623 
624   PetscFunctionBegin;
625   ierr = MatCreate(comm,A);CHKERRQ(ierr);
626   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
627   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
628   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
629   PetscFunctionReturn(0);
630 }
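/*
   Illustrative usage (editor's sketch, not part of the original source): a typical sequence creates
   the adjacency matrix from PetscMalloc()'d CSR arrays ii/jj (m local rows, N global columns) and
   feeds it to a partitioner; the names "adj", "part" and "is" are hypothetical.

     Mat             adj;
     MatPartitioning part;
     IS              is;
     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,m,N,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/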
631 
632 
633 
634 
635 
636 
637 
638