
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
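      /* these arrays were obtained with the system malloc() (for example by an external package), so release them with free() */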
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    a->diag[i] = -1;   /* default when the row has no diagonal entry (MPIAdj normally stores none) */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else *idx = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* unequal if the local dimensions or the number of nonzeros differ */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row offsets */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* compare the column indices only if the row offsets already matched */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
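  /* a nonzero oshift converts the arrays to 1-based indexing in place, as expected by Fortran-based callers */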
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros; ia holds one offset per local row plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
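        /* the magnitude of the numerical entry is kept as an integer edge weight */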
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

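   Example:
   A sketch of a typical calling sequence; the partitioning call in the middle is a
   placeholder for whatever the subcommunicator matrix is needed for:
.vb
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {   /* NULL on ranks that own zero rows of A */
     /* ... use B, e.g. hand it to a partitioning package ... */
     ierr = MatDestroy(&B);CHKERRQ(ierr);   /* destroy B before A */
   }
.ve
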
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

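  Example:
  One common way to obtain a matrix of this type is to convert an existing matrix,
  for instance (a sketch, with adj a previously declared Mat):
.vb
  ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
.ve
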
.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the adjacency structure of the matrix

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

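   Example:
   A minimal sketch for a graph with two locally owned vertices connected to each
   other (the arrays must be allocated with PetscMalloc(), since the matrix takes
   ownership of them and frees them when it is destroyed):
.vb
   ii[0] = 0; ii[1] = 1; ii[2] = 2;   /* one neighbor per row       */
   jj[0] = 1; jj[1] = 0;              /* 0 <-> 1, diagonals omitted */
   ierr  = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
.ve
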
.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

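   Example:
   A sketch of the input arrays for the adjacency graph of a 4-vertex path
   0 - 1 - 2 - 3 stored on a single process (the arrays are illustrative):
.vb
   Mat            A;
   PetscInt       *ii,*jj;
   PetscErrorCode ierr;
   ierr  = PetscMalloc(5*sizeof(PetscInt),&ii);CHKERRQ(ierr);
   ierr  = PetscMalloc(6*sizeof(PetscInt),&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;  /* row offsets */
   jj[0] = 1;             /* neighbors of vertex 0 */
   jj[1] = 0; jj[2] = 2;  /* neighbors of vertex 1 */
   jj[3] = 1; jj[4] = 3;  /* neighbors of vertex 2 */
   jj[5] = 2;             /* neighbors of vertex 3 */
   ierr  = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,NULL,&A);CHKERRQ(ierr);
   /* ii and jj are now owned by A and are freed by MatDestroy() */
.ve
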
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}