/* xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 8cc058d9cd56c1ccb3be12a47760ddfc446aaffc) */

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* If the local matrix dimensions or the numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i match */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* only if they do, check that the column indices a->j match as well */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);   /* row offsets: one entry per local row plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

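/*
   Example (a minimal sketch, not part of the original source): user code normally reaches
   MatConvertFrom_MPIAdj() through MatConvert().  Here "A" is an assumed, already assembled
   matrix (e.g. of type MATMPIAIJ) and error handling is abbreviated to CHKERRQ.

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/
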
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D is out of range: it must be nonnegative and no smaller than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that own zero rows of A

   Level: developer

   Notes:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

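/*
   Example (a hedged sketch, not part of the original source): a partitioning package that
   requires every process to own at least one row could use the routine as follows.  Here
   "adj" is an assumed MATMPIADJ matrix and error handling is abbreviated.

     Mat sub;
     ierr = MatMPIAdjCreateNonemptySubcommMat(adj,&sub);CHKERRQ(ierr);
     if (sub) {                                       sub is NULL on ranks that own zero rows of adj
       ... run the partitioner on the communicator of sub here ...
       ierr = MatDestroy(&sub);CHKERRQ(ierr);         destroy sub before adj; the i/j arrays are shared
     }
*/
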
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

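/*
   Example (a minimal sketch, not part of the original source): a MATMPIADJ matrix is the
   natural adjacency input for the MatPartitioning interface.  Here "adj" is an assumed
   MATMPIADJ matrix and error handling is abbreviated.

     MatPartitioning part;
     IS              is;
     ierr = MatPartitioningCreate(PetscObjectComm((PetscObject)adj),&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);
*/
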
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C","MatMPIAdjSetPreallocation_MPIAdj",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C","MatMPIAdjCreateNonemptySubcommMat_MPIAdj",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

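/*
   Example (a hedged sketch, not part of the original source): preallocating a small adjacency
   structure through the MatCreate()/MatSetType() path.  The numbers are made up for
   illustration and the sketch assumes a single process; the i/j arrays must come from
   PetscMalloc() because the matrix takes ownership and frees them in MatDestroy().

     Mat      adj;
     PetscInt *ii,*jj;
     ierr = PetscMalloc(3*sizeof(PetscInt),&ii);CHKERRQ(ierr);     row offsets, length nrows+1
     ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);     column indices, length ii[nrows]
     ii[0] = 0; ii[1] = 2; ii[2] = 4;
     jj[0] = 1; jj[1] = 2;                                         neighbours of row 0 (no diagonal entry)
     jj[2] = 0; jj[3] = 2;                                         neighbours of row 1 (no diagonal entry)
     ierr = MatCreate(PETSC_COMM_WORLD,&adj);CHKERRQ(ierr);
     ierr = MatSetSizes(adj,2,PETSC_DETERMINE,PETSC_DETERMINE,3);CHKERRQ(ierr);
     ierr = MatSetType(adj,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(adj,ii,jj,NULL);CHKERRQ(ierr);
*/
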
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

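/*
   Example (a hedged sketch, not part of the original source): creating the adjacency matrix
   of a 3-vertex triangle graph with the convenience constructor.  The sketch assumes a single
   process; the arrays come from PetscMalloc() and are freed by the matrix, not by the caller.

     Mat      adj;
     PetscInt *ii,*jj;
     ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
     ierr = PetscMalloc(6*sizeof(PetscInt),&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
     jj[0] = 1; jj[1] = 2;                                         vertex 0 is connected to 1 and 2
     jj[2] = 0; jj[3] = 2;                                         vertex 1 is connected to 0 and 2
     jj[4] = 0; jj[5] = 1;                                         vertex 2 is connected to 0 and 1
     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,3,3,ii,jj,NULL,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/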
624