xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 3bf036e263fd78807e2931ff42d9ddcd8aae3fd4)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
/*
   Frees all storage associated with an MPIAdj matrix.  The i/j/values arrays
   are released only when the matrix owns them (a->freeaij); a->freeaijwithfree
   distinguishes arrays obtained with the system malloc() from arrays obtained
   with PetscMalloc().
*/
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays came from the system malloc(), so release them the same way */
      /* NOTE(review): a->values is not freed on this path -- presumably callers
         using free()-allocated arrays never attach values; confirm no leak */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  /* remove composed methods so a subsequent MatSetType() starts clean */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatSetOption_MPIAdj"
89 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
90 {
91   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
92   PetscErrorCode ierr;
93 
94   PetscFunctionBegin;
95   switch (op) {
96   case MAT_SYMMETRIC:
97   case MAT_STRUCTURALLY_SYMMETRIC:
98   case MAT_HERMITIAN:
99     a->symmetric = flg;
100     break;
101   case MAT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
/*
   Records, for each local row, the position within a->j of its diagonal entry.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  /* one diagonal-position slot per local row */
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    /* NOTE(review): the comparison uses the LOCAL row index i, not i+rstart --
       presumably only meaningful sequentially or for the diagonal block; confirm */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
    /* NOTE(review): if row i has no diagonal entry (MPIAdj normally omits
       diagonals) a->diag[i] is left uninitialized -- confirm callers only use
       this when diagonals are stored */
  }
  PetscFunctionReturn(0);
}
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap->rstart;
146 
147   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   Companion to MatGetRow_MPIAdj(): intentionally a no-op, because MatGetRow_MPIAdj()
   returns pointers directly into the matrix storage without copying anything.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscBool      flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
   Hands out the internal CSR arrays (no copy).  With oshift != 0 the arrays are
   converted to 1-based indexing IN PLACE, so the caller must balance this with
   MatRestoreRowIJ() to shift them back.  symmetric/blockcompressed are ignored.
*/
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
  PetscInt       **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: (*ia)[*m] still holds the unshifted nonzero count here */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
214 
215 #undef __FUNCT__
216 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
   Undoes MatGetRowIJ_MPIAdj(): verifies the caller passed back the same arrays
   and, if they were shifted to 1-based indexing, shifts them back in place.
*/
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* reverse order of MatGetRowIJ: shift ia down first so (*ia)[*m] is the
       correct (unshifted) nonzero count for the ja loop below */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
234 
235 #undef __FUNCT__
236 #define __FUNCT__ "MatConvertFrom_MPIAdj"
237 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
238 {
239   Mat               B;
240   PetscErrorCode    ierr;
241   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
242   const PetscInt    *rj;
243   const PetscScalar *ra;
244   MPI_Comm          comm;
245 
246   PetscFunctionBegin;
247   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
248   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
249   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
250 
251   /* count the number of nonzeros per row */
252   for (i=0; i<m; i++) {
253     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
254     for (j=0; j<len; j++) {
255       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
256     }
257     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
258     nzeros += len;
259   }
260 
261   /* malloc space for nonzeros */
262   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
263   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
264   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
265 
266   nzeros = 0;
267   ia[0]  = 0;
268   for (i=0; i<m; i++) {
269     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
270     cnt     = 0;
271     for (j=0; j<len; j++) {
272       if (rj[j] != i+rstart) { /* if not diagonal */
273         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
274         ja[nzeros+cnt++] = rj[j];
275       }
276     }
277     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
278     nzeros += cnt;
279     ia[i+1] = nzeros;
280   }
281 
282   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
283   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
284   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
285   ierr = MatSetType(B,type);CHKERRQ(ierr);
286   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
287 
288   if (reuse == MAT_REUSE_MATRIX) {
289     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
290   } else {
291     *newmat = B;
292   }
293   PetscFunctionReturn(0);
294 }
295 
296 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Most slots are 0 (unsupported) because an
   adjacency matrix carries no numerical values; only structural operations
   (row access, equality, row-IJ access, set-option, destroy, view,
   convert-from) are provided.  The numeric comments index into struct _MatOps.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
395 
396 EXTERN_C_BEGIN
397 #undef __FUNCT__
398 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
399 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
400 {
401   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
402   PetscErrorCode ierr;
403 #if defined(PETSC_USE_DEBUG)
404   PetscInt       ii;
405 #endif
406 
407   PetscFunctionBegin;
408   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
409   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
410 
411 #if defined(PETSC_USE_DEBUG)
412   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
413   for (ii=1; ii<B->rmap->n; ii++) {
414     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
415   }
416   for (ii=0; ii<i[B->rmap->n]; ii++) {
417     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
418   }
419 #endif
420   B->preallocated = PETSC_TRUE;
421 
422   b->j      = j;
423   b->i      = i;
424   b->values = values;
425 
426   b->nz               = i[B->rmap->n];
427   b->diag             = 0;
428   b->symmetric        = PETSC_FALSE;
429   b->freeaij          = PETSC_TRUE;
430 
431   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
432   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
433   PetscFunctionReturn(0);
434 }
435 EXTERN_C_END
436 
437 #undef __FUNCT__
438 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
439 PETSC_EXTERN_C PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
440 {
441   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
442   PetscErrorCode ierr;
443   const PetscInt *ranges;
444   MPI_Comm       acomm,bcomm;
445   MPI_Group      agroup,bgroup;
446   PetscMPIInt    i,rank,size,nranks,*ranks;
447 
448   PetscFunctionBegin;
449   *B = PETSC_NULL;
450   acomm = ((PetscObject)A)->comm;
451   ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
452   ierr = MPI_Comm_size(acomm,&rank);CHKERRQ(ierr);
453   ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
454   for (i=0,nranks=0; i<size; i++) {
455     if (ranges[i+1] - ranges[i] > 0) nranks++;
456   }
457   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
458     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
459     *B = A;
460     PetscFunctionReturn(0);
461   }
462 
463   ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
464   for (i=0,nranks=0; i<size; i++) {
465     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
466   }
467   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
468   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
469   ierr = PetscFree(ranks);CHKERRQ(ierr);
470   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
471   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
472   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
473   if (bcomm != MPI_COMM_NULL) {
474     PetscInt   m,N;
475     Mat_MPIAdj *b;
476     ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
477     ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
478     ierr = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
479     b = (Mat_MPIAdj*)(*B)->data;
480     b->freeaij = PETSC_FALSE;
481     ierr = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
482   }
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
488 /*@
489    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
490 
491    Collective
492 
493    Input Arguments:
494 .  A - original MPIAdj matrix
495 
496    Output Arguments:
497 .  B - matrix on subcommunicator, PETSC_NULL on ranks that owned zero rows of A
498 
499    Level: developer
500 
501    Note:
502    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
503 
504    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
505 
506 .seealso: MatCreateMPIAdj()
507 @*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A; errors if the
     matrix type does not provide one */
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
517 
518 /*MC
519    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
520    intended for use constructing orderings and partitionings.
521 
522   Level: beginner
523 
524 .seealso: MatCreateMPIAdj
525 M*/
526 
527 EXTERN_C_BEGIN
528 #undef __FUNCT__
529 #define __FUNCT__ "MatCreate_MPIAdj"
530 PetscErrorCode  MatCreate_MPIAdj(Mat B)
531 {
532   Mat_MPIAdj     *b;
533   PetscErrorCode ierr;
534 
535   PetscFunctionBegin;
536   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
537   B->data             = (void*)b;
538   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
539   B->assembled        = PETSC_FALSE;
540 
541   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
542                                     "MatMPIAdjSetPreallocation_MPIAdj",
543                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
544   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
545                                     "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
546                                      MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
547   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
548   PetscFunctionReturn(0);
549 }
550 EXTERN_C_END
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "MatMPIAdjSetPreallocation"
554 /*@C
555    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
556 
557    Logically Collective on MPI_Comm
558 
559    Input Parameters:
560 +  A - the matrix
561 .  i - the indices into j for the start of each row
562 .  j - the column indices for each row (sorted for each row).
563        The indices in i and j start with zero (NOT with one).
564 -  values - [optional] edge weights
565 
566    Level: intermediate
567 
568 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
569 @*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Try (not Use): silently does nothing for matrix types that do not compose
     a "MatMPIAdjSetPreallocation_C" method */
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
578 
579 #undef __FUNCT__
580 #define __FUNCT__ "MatCreateMPIAdj"
581 /*@C
582    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
583    The matrix does not have numerical values associated with it, but is
584    intended for ordering (to reduce bandwidth etc) and partitioning.
585 
586    Collective on MPI_Comm
587 
588    Input Parameters:
589 +  comm - MPI communicator
590 .  m - number of local rows
591 .  N - number of global columns
592 .  i - the indices into j for the start of each row
593 .  j - the column indices for each row (sorted for each row).
594        The indices in i and j start with zero (NOT with one).
595 -  values -[optional] edge weights
596 
597    Output Parameter:
598 .  A - the matrix
599 
600    Level: intermediate
601 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonal.
608 
609    If you already have a matrix, you can create its adjacency matrix by a call
610    to MatConvert, specifying a type of MATMPIADJ.
611 
612    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
613 
614 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
615 @*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard create / set-sizes / set-type / preallocate sequence; ownership
     of the i, j, values arrays passes to the matrix (see manual page above) */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
627 
628 
629 
630 
631 
632 
633 
634