
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    }
    else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices cannot be equal if the local dimensions or the numbers of nonzeros differ */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* check that the row offsets a->i are the same */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  /* check that the column indices a->j are the same */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  PetscInt       **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->rmap->n];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
PETSC_EXTERN_C PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B = PETSC_NULL;
  acomm = ((PetscObject)A)->comm;
  ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
    ierr = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, PETSC_NULL on ranks that own zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

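   Example usage (a minimal sketch; adj is assumed to be an existing assembled MATMPIADJ matrix, with error handling as elsewhere in this file):
.vb
   Mat sub;
   ierr = MatMPIAdjCreateNonemptySubcommMat(adj,&sub);CHKERRQ(ierr);
   if (sub) {   /* only ranks owning at least one row of adj receive a matrix */
     /* ... hand sub to a partitioner that requires nonempty local row sets ... */
     ierr = MatDestroy(&sub);CHKERRQ(ierr);   /* destroy sub before adj, the index arrays are shared */
   }
.ve
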
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

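   Example (a minimal sketch; A is assumed to be an existing assembled matrix, see the notes of MatCreateMPIAdj()):
.vb
   Mat adj;
   ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
   /* ... use adj for ordering or partitioning ... */
   ierr = MatDestroy(&adj);CHKERRQ(ierr);
.ve
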
  Level: beginner

.seealso: MatCreateMPIAdj
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode  MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled        = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
                                    "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
                                     MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

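   Example usage (a minimal sketch; comm, m, N, and the i and j arrays are assumed to be set up by the caller as described above):
.vb
   Mat B;
   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
   ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
   ierr = MatMPIAdjSetPreallocation(B,i,j,PETSC_NULL);CHKERRQ(ierr);   /* i and j are taken over and freed by PETSc */
.ve
   This is the same sequence that MatCreateMPIAdj() performs internally.
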
   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth, etc.) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The i and j arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

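   Example (a minimal sketch of the input arrays; the 4-row graph shown is illustrative only, with all rows owned by one process):
.vb
   Mat      A;
   PetscInt *ii,*jj;
   ierr  = PetscMalloc(5*sizeof(PetscInt),&ii);CHKERRQ(ierr);
   ierr  = PetscMalloc(6*sizeof(PetscInt),&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 5; ii[4] = 6;             /* row starts */
   jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2; jj[4] = 1; jj[5] = 0;  /* sorted columns, no diagonal entries */
   ierr  = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
   /* ii and jj are now owned by A and are freed when A is destroyed */
.ve
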
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}