xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 3923b477fd0dced8a2d147b4fb4519fe3af97d3f)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     if (a->freeaijwithfree) {
72       if (a->i) free(a->i);
73       if (a->j) free(a->j);
74     } else {
75       ierr = PetscFree(a->i);CHKERRQ(ierr);
76       ierr = PetscFree(a->j);CHKERRQ(ierr);
77       ierr = PetscFree(a->values);CHKERRQ(ierr);
78     }
79   }
80   ierr = PetscFree(mat->data);CHKERRQ(ierr);
81   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
83   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatSetOption_MPIAdj"
89 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
90 {
91   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
92   PetscErrorCode ierr;
93 
94   PetscFunctionBegin;
95   switch (op) {
96   case MAT_SYMMETRIC:
97   case MAT_STRUCTURALLY_SYMMETRIC:
98   case MAT_HERMITIAN:
99     a->symmetric = flg;
100     break;
101   case MAT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap->n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap->n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap->rstart;
146 
147   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/* Intentionally a no-op: MatGetRow_MPIAdj returns pointers into the matrix's
   own storage, so there is nothing to copy back or free. */
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscBool      flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
196 {
197   PetscInt       i;
198   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
199 
200   PetscFunctionBegin;
201   *m    = A->rmap->n;
202   *ia   = a->i;
203   *ja   = a->j;
204   *done = PETSC_TRUE;
205   if (oshift) {
206     for (i=0; i<(*ia)[*m]; i++) {
207       (*ja)[i]++;
208     }
209     for (i=0; i<=(*m); i++) (*ia)[i]++;
210   }
211   PetscFunctionReturn(0);
212 }
213 
214 #undef __FUNCT__
215 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
216 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
217 {
218   PetscInt   i;
219   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
220 
221   PetscFunctionBegin;
222   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
223   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
224   if (oshift) {
225     for (i=0; i<=(*m); i++) (*ia)[i]--;
226     for (i=0; i<(*ia)[*m]; i++) {
227       (*ja)[i]--;
228     }
229   }
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "MatConvertFrom_MPIAdj"
235 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
236 {
237   Mat               B;
238   PetscErrorCode    ierr;
239   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
240   const PetscInt    *rj;
241   const PetscScalar *ra;
242   MPI_Comm          comm;
243 
244   PetscFunctionBegin;
245   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
246   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
247   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
248 
249   /* count the number of nonzeros per row */
250   for (i=0; i<m; i++) {
251     ierr   = MatGetRow(A,i+rstart,&len,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
252     ierr   = MatRestoreRow(A,i+rstart,&len,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
253     nzeros += len;
254   }
255 
256   /* malloc space for nonzeros */
257   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
258   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
259   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
260 
261   nzeros = 0;
262   ia[0]  = 0;
263   for (i=0; i<m; i++) {
264     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
265     cnt     = 0;
266     for (j=0; j<len; j++) {
267       a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
268       ja[nzeros+cnt++] = rj[j];
269     }
270     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
271     nzeros += cnt;
272     ia[i+1] = nzeros;
273   }
274 
275   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
276   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
277   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
278   ierr = MatSetType(B,type);CHKERRQ(ierr);
279   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
280 
281   if (reuse == MAT_REUSE_MATRIX) {
282     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
283   } else {
284     *newmat = B;
285   }
286   PetscFunctionReturn(0);
287 }
288 
/* -------------------------------------------------------------------*/
/* Operation table for MATMPIADJ.  Entries are positional within struct
   _MatOps (the numbered comments mark slot indices); only row access,
   equality, option setting, row-IJ access, destroy/view, and
   conversion-from are implemented -- every 0 is an unsupported op. */
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
388 
389 EXTERN_C_BEGIN
390 #undef __FUNCT__
391 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
392 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
393 {
394   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
395   PetscErrorCode ierr;
396 #if defined(PETSC_USE_DEBUG)
397   PetscInt       ii;
398 #endif
399 
400   PetscFunctionBegin;
401   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
402   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
403 
404 #if defined(PETSC_USE_DEBUG)
405   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
406   for (ii=1; ii<B->rmap->n; ii++) {
407     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
408   }
409   for (ii=0; ii<i[B->rmap->n]; ii++) {
410     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
411   }
412 #endif
413   B->preallocated = PETSC_TRUE;
414 
415   b->j      = j;
416   b->i      = i;
417   b->values = values;
418 
419   b->nz               = i[B->rmap->n];
420   b->diag             = 0;
421   b->symmetric        = PETSC_FALSE;
422   b->freeaij          = PETSC_TRUE;
423 
424   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
425   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
426   PetscFunctionReturn(0);
427 }
428 EXTERN_C_END
429 
430 #undef __FUNCT__
431 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
432 PETSC_EXTERN_C PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
433 {
434   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
435   PetscErrorCode ierr;
436   const PetscInt *ranges;
437   MPI_Comm       acomm,bcomm;
438   MPI_Group      agroup,bgroup;
439   PetscMPIInt    i,rank,size,nranks,*ranks;
440 
441   PetscFunctionBegin;
442   *B = PETSC_NULL;
443   acomm = ((PetscObject)A)->comm;
444   ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
445   ierr = MPI_Comm_size(acomm,&rank);CHKERRQ(ierr);
446   ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
447   for (i=0,nranks=0; i<size; i++) {
448     if (ranges[i+1] - ranges[i] > 0) nranks++;
449   }
450   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
451     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
452     *B = A;
453     PetscFunctionReturn(0);
454   }
455 
456   ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
457   for (i=0,nranks=0; i<size; i++) {
458     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
459   }
460   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
461   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
462   ierr = PetscFree(ranks);CHKERRQ(ierr);
463   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
464   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
465   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
466   if (bcomm != MPI_COMM_NULL) {
467     PetscInt   m,N;
468     Mat_MPIAdj *b;
469     ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
470     ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
471     ierr = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
472     b = (Mat_MPIAdj*)(*B)->data;
473     b->freeaij = PETSC_FALSE;
474     ierr = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
475   }
476   PetscFunctionReturn(0);
477 }
478 
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, PETSC_NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dispatch to the type-specific implementation composed on A */
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
510 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
519 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/* Type constructor for MATMPIADJ: allocates the per-type data, installs the
   operations table, and composes the type-specific preallocation and
   subcommunicator-creation methods (looked up by string name elsewhere). */
PetscErrorCode  MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled        = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
                                    "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
                                     MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "MatMPIAdjSetPreallocation"
547 /*@C
548    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
549 
550    Logically Collective on MPI_Comm
551 
552    Input Parameters:
553 +  A - the matrix
554 .  i - the indices into j for the start of each row
555 .  j - the column indices for each row (sorted for each row).
556        The indices in i and j start with zero (NOT with one).
557 -  values - [optional] edge weights
558 
559    Level: intermediate
560 
561 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
562 @*/
563 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
564 {
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
569   PetscFunctionReturn(0);
570 }
571 
572 #undef __FUNCT__
573 #define __FUNCT__ "MatCreateMPIAdj"
574 /*@C
575    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
576    The matrix does not have numerical values associated with it, but is
577    intended for ordering (to reduce bandwidth etc) and partitioning.
578 
579    Collective on MPI_Comm
580 
581    Input Parameters:
582 +  comm - MPI communicator
583 .  m - number of local rows
584 .  N - number of global columns
585 .  i - the indices into j for the start of each row
586 .  j - the column indices for each row (sorted for each row).
587        The indices in i and j start with zero (NOT with one).
588 -  values -[optional] edge weights
589 
590    Output Parameter:
591 .  A - the matrix
592 
593    Level: intermediate
594 
595    Notes: This matrix object does not support most matrix operations, include
596    MatSetValues().
597    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
598    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
599     call from Fortran you need not create the arrays with PetscMalloc().
600    Should not include the matrix diagonals.
601 
602    If you already have a matrix, you can create its adjacency matrix by a call
603    to MatConvert, specifying a type of MATMPIADJ.
604 
605    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
606 
607 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
608 @*/
609 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
610 {
611   PetscErrorCode ierr;
612 
613   PetscFunctionBegin;
614   ierr = MatCreate(comm,A);CHKERRQ(ierr);
615   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
616   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
617   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
621 
622 
623 
624 
625 
626 
627