xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision e6e75211d226c622f451867f53ce5d558649ff4f)

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
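
/* A minimal usage sketch (assumed, not part of this file): the ASCII viewer above prints
   one "row i: ..." line per locally owned row, so an MPIAdj matrix can be inspected with

     ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   which reaches this routine through MatView_MPIAdj() below for ASCII viewers. */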

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}
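
/* A minimal usage sketch (assumed, not part of this file): record that the adjacency
   structure is symmetric, which is the only state the handler above actually tracks.

     ierr = MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);

   All other options are accepted but ignored, as logged through PetscInfo1(). */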


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    /* the matrix may have been created without edge weights; report a weight of 1 in that case */
    for (j=0; j<*nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
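
/* A minimal usage sketch (assumed, not part of this file): walk the locally owned rows with
   the generic MatGetRow()/MatRestoreRow() interface, which dispatches to the two routines
   above. Rows must be addressed by their global numbers; the column indices returned are in
   global numbering as well.

     PetscInt       rstart,rend,row,ncols;
     const PetscInt *cols;
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,NULL);CHKERRQ(ierr);      // NULL: skip the weights
       ... use the ncols neighbours listed in cols[] ...
       ierr = MatRestoreRow(A,row,&ncols,&cols,NULL);CHKERRQ(ierr);
     }
*/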

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices cannot be equal if the local dimensions or the number of nonzeros differ */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* check that the row offsets a->i are the same */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  /* check that the column indices a->j are the same */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
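
/* A minimal usage sketch (assumed, not part of this file): borrow the CSR arrays describing
   the locally owned rows. With oshift = 1 the routines above shift the arrays to 1-based
   indexing in place (convenient for Fortran-indexed partitioning libraries) and shift them
   back when restored.

     PetscInt       n;
     const PetscInt *ia,*ja;
     PetscBool      done;
     ierr = MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     if (done) {
       ... pass n, ia, ja to an external package ...
     }
     ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
*/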

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
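
/* A minimal usage sketch (assumed, not part of this file): MatConvertFrom_MPIAdj() above is
   what runs when an assembled matrix of another type is converted to MATMPIADJ, e.g. to feed
   a partitioner. The diagonal is dropped and |a_ij| is truncated to an integer edge weight.

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ... partition or order using adj ...
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/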

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
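
/* A minimal usage sketch (assumed, not part of this file): shrink an adjacency matrix onto
   the ranks that actually own rows before handing it to a package that cannot tolerate
   empty local row blocks.

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {                                   // NULL on ranks that owned zero rows of A
       ... use B on its subcommunicator ...
       ierr = MatDestroy(&B);CHKERRQ(ierr);     // destroy B before A; the CSR arrays are shared
     }
*/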

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays that define the nonzero structure and optional edge weights of the matrix

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

   Notes:
   PETSc takes ownership of the i, j, and values arrays and frees them when the matrix is
   destroyed, so they must be allocated with PetscMalloc() and must not be freed by the caller.

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
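
/* A minimal sketch (assumed, not part of this file) of the long-form creation path: create
   the Mat, set its type to MATMPIADJ, then hand over the CSR-style arrays. MatCreateMPIAdj()
   below wraps exactly this sequence, with comm, m, and N as described there.

     Mat      A;
     PetscInt *ii,*jj;                    // allocated with PetscMalloc1(); ownership passes to A
     ierr = MatCreate(comm,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(A,ii,jj,NULL);CHKERRQ(ierr);   // NULL: no edge weights
*/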

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   This matrix object does not support most matrix operations, including MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. You must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
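
/* A small illustrative sketch (assumed, not part of this file): the undirected path graph
   0-1-2 stored on a single process, with the diagonal excluded as required.
   Row 0 is adjacent to {1}, row 1 to {0,2}, row 2 to {1}.

     Mat      A;
     PetscInt *ii,*jj;
     ierr = PetscMalloc1(4,&ii);CHKERRQ(ierr);   // m+1 = 4 row offsets
     ierr = PetscMalloc1(4,&jj);CHKERRQ(ierr);   // 4 off-diagonal entries
     ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;
     jj[0] = 1;            // row 0: column 1
     jj[1] = 0; jj[2] = 2; // row 1: columns 0 and 2
     jj[3] = 1;            // row 2: column 1
     ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,3,3,ii,jj,NULL,&A);CHKERRQ(ierr);
     // ii and jj now belong to A; do not free them
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/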