
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices differ if the local dimensions or the numbers of nonzeros do not match */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise the row offsets must match ... */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* ... and so must the column indices */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range",j[ii],ii);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - Creates the same MPIAdj matrix on a subcommunicator containing only the processes that own a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that own zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
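
/*
   Usage sketch (illustrative only, not compiled here) for MatMPIAdjCreateNonemptySubcommMat().
   It assumes an existing MPIAdj matrix A of which some ranks may own zero rows; ierr is the
   usual PetscErrorCode in the caller.  Because B borrows A's arrays (see the note in the man
   page above), B must be destroyed before A.

      Mat B;

      ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
      if (B) {
        (use B with a package that requires every process to own at least one row)
        ierr = MatDestroy(&B);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&A);CHKERRQ(ierr);
*/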

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
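
/*
   A hedged example of the intended use of MATMPIADJ: hand the distributed graph to a
   MatPartitioning object.  The sketch assumes the caller has already filled the CSR-style
   arrays ia and ja (0-based, sorted within each row, no diagonal entries) describing its
   nlocal rows of a graph with N vertices; comm, ierr, nlocal, N, ia and ja are assumptions
   of this sketch, not names used in this file.

      Mat             adj;
      MatPartitioning part;
      IS              ispart;

      ierr = MatCreateMPIAdj(comm,nlocal,N,ia,ja,NULL,&adj);CHKERRQ(ierr);
      ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&ispart);CHKERRQ(ierr);
      (ispart now gives, for each local row, the process it has been assigned to)
      ierr = ISDestroy(&ispart);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
      ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/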

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and (optionally) the edge weights

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
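
/*
   A small sketch of the preallocation path documented above: the caller builds the i/j arrays
   with PetscMalloc() and hands ownership to the matrix, which frees them when it is destroyed.
   The ring graph, the local row count nlocal and all variable names (including comm and ierr)
   are illustrative assumptions only; run on at least two processes so the two neighbours of a
   vertex are distinct.

      Mat         B;
      PetscInt    nlocal = 2,N,rstart,r,left,right,*ii,*jj;
      PetscMPIInt size,rank;

      ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
      ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      N      = nlocal*size;
      rstart = nlocal*rank;
      ierr   = PetscMalloc((nlocal+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr   = PetscMalloc(2*nlocal*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0]  = 0;
      for (r=0; r<nlocal; r++) {
        left      = (rstart+r+N-1)%N;
        right     = (rstart+r+1)%N;
        jj[2*r]   = PetscMin(left,right);
        jj[2*r+1] = PetscMax(left,right);
        ii[r+1]   = 2*(r+1);
      }
      ierr = MatCreate(comm,&B);CHKERRQ(ierr);
      ierr = MatSetSizes(B,nlocal,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
      ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
      ierr = MatMPIAdjSetPreallocation(B,ii,jj,NULL);CHKERRQ(ierr);
      (use B; destroying it also frees ii and jj)
      ierr = MatDestroy(&B);CHKERRQ(ierr);
*/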

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
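
/*
   As the note above says, an adjacency matrix can also be obtained from an existing assembled
   matrix with MatConvert().  A hedged sketch, assuming C is an existing parallel matrix (for
   example MATMPIAIJ) and ierr is the caller's PetscErrorCode; MatConvert() builds fresh i/j
   arrays internally, so C is left untouched and the two matrices are destroyed independently.

      Mat adj;

      ierr = MatConvert(C,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      (use adj, for example with MatPartitioningSetAdjacency())
      ierr = MatDestroy(&adj);CHKERRQ(ierr);
      ierr = MatDestroy(&C);CHKERRQ(ierr);
*/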
669