xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 84df9cb40eca90ea9b18a456fab7a4ecc7f6c1a4)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     if (a->freeaijwithfree) {
72       if (a->i) free(a->i);
73       if (a->j) free(a->j);
74     } else {
75       ierr = PetscFree(a->i);CHKERRQ(ierr);
76       ierr = PetscFree(a->j);CHKERRQ(ierr);
77       ierr = PetscFree(a->values);CHKERRQ(ierr);
78     }
79   }
80   ierr = PetscFree(mat->data);CHKERRQ(ierr);
81   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
83   PetscFunctionReturn(0);
84 }
85 
86 #undef __FUNCT__
87 #define __FUNCT__ "MatSetOption_MPIAdj"
88 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
89 {
90   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
91   PetscErrorCode ierr;
92 
93   PetscFunctionBegin;
94   switch (op) {
95   case MAT_SYMMETRIC:
96   case MAT_STRUCTURALLY_SYMMETRIC:
97   case MAT_HERMITIAN:
98     a->symmetric = flg;
99     break;
100   case MAT_SYMMETRY_ETERNAL:
101     break;
102   default:
103     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
104     break;
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 
110 /*
111      Adds diagonal pointers to sparse matrix structure.
112 */
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
116 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117 {
118   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
119   PetscErrorCode ierr;
120   PetscInt       i,j,m = A->rmap->n;
121 
122   PetscFunctionBegin;
123   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
124   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
125   for (i=0; i<A->rmap->n; i++) {
126     for (j=a->i[i]; j<a->i[i+1]; j++) {
127       if (a->j[j] == i) {
128         a->diag[i] = j;
129         break;
130       }
131     }
132   }
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "MatGetRow_MPIAdj"
138 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141   PetscInt   *itmp;
142 
143   PetscFunctionBegin;
144   row -= A->rmap->rstart;
145 
146   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147 
148   *nz = a->i[row+1] - a->i[row];
149   if (v) *v = PETSC_NULL;
150   if (idx) {
151     itmp = a->j + a->i[row];
152     if (*nz) {
153       *idx = itmp;
154     }
155     else *idx = 0;
156   }
157   PetscFunctionReturn(0);
158 }
159 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/* Companion to MatGetRow_MPIAdj(); nothing to release because MatGetRow_MPIAdj
   hands out pointers into the matrix's own storage rather than copies. */
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
167 
168 #undef __FUNCT__
169 #define __FUNCT__ "MatEqual_MPIAdj"
170 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
171 {
172   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
173   PetscErrorCode ierr;
174   PetscBool      flag;
175 
176   PetscFunctionBegin;
177   /* If the  matrix dimensions are not equal,or no of nonzeros */
178   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
179     flag = PETSC_FALSE;
180   }
181 
182   /* if the a->i are the same */
183   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
184 
185   /* if a->j are the same */
186   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
187 
188   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
189   PetscFunctionReturn(0);
190 }
191 
192 #undef __FUNCT__
193 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
194 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
195 {
196   PetscInt       i;
197   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
198 
199   PetscFunctionBegin;
200   *m    = A->rmap->n;
201   *ia   = a->i;
202   *ja   = a->j;
203   *done = PETSC_TRUE;
204   if (oshift) {
205     for (i=0; i<(*ia)[*m]; i++) {
206       (*ja)[i]++;
207     }
208     for (i=0; i<=(*m); i++) (*ia)[i]++;
209   }
210   PetscFunctionReturn(0);
211 }
212 
213 #undef __FUNCT__
214 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
215 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
216 {
217   PetscInt   i;
218   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
219 
220   PetscFunctionBegin;
221   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
222   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
223   if (oshift) {
224     for (i=0; i<=(*m); i++) (*ia)[i]--;
225     for (i=0; i<(*ia)[*m]; i++) {
226       (*ja)[i]--;
227     }
228   }
229   PetscFunctionReturn(0);
230 }
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "MatConvertFrom_MPIAdj"
234 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
235 {
236   Mat               B;
237   PetscErrorCode    ierr;
238   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
239   const PetscInt    *rj;
240   const PetscScalar *ra;
241   MPI_Comm          comm;
242 
243   PetscFunctionBegin;
244   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
245   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
246   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
247 
248   /* count the number of nonzeros per row */
249   for (i=0; i<m; i++) {
250     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
251     for (j=0; j<len; j++) {
252       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
253     }
254     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
255     nzeros += len;
256   }
257 
258   /* malloc space for nonzeros */
259   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
260   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
261   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
262 
263   nzeros = 0;
264   ia[0]  = 0;
265   for (i=0; i<m; i++) {
266     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
267     cnt     = 0;
268     for (j=0; j<len; j++) {
269       if (rj[j] != i+rstart) { /* if not diagonal */
270         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
271         ja[nzeros+cnt++] = rj[j];
272       }
273     }
274     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
275     nzeros += cnt;
276     ia[i+1] = nzeros;
277   }
278 
279   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
280   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
281   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
282   ierr = MatSetType(B,type);CHKERRQ(ierr);
283   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
284 
285   if (reuse == MAT_REUSE_MATRIX) {
286     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
287   } else {
288     *newmat = B;
289   }
290   PetscFunctionReturn(0);
291 }
292 
293 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  The numbered comments give the slot index in
   struct _MatOps; the positions are fixed by that struct's layout, so entries
   must not be reordered.  Only the handful of operations meaningful for an
   adjacency-structure-only matrix are implemented; every other slot is 0.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
392 
393 EXTERN_C_BEGIN
394 #undef __FUNCT__
395 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
396 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
397 {
398   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
399   PetscErrorCode ierr;
400 #if defined(PETSC_USE_DEBUG)
401   PetscInt       ii;
402 #endif
403 
404   PetscFunctionBegin;
405   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
406   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
407   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
408   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
409 
410 #if defined(PETSC_USE_DEBUG)
411   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
412   for (ii=1; ii<B->rmap->n; ii++) {
413     if (i[ii] < 0 || i[ii] < i[ii-1]) {
414       SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
415     }
416   }
417   for (ii=0; ii<i[B->rmap->n]; ii++) {
418     if (j[ii] < 0 || j[ii] >= B->cmap->N) {
419       SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
420     }
421   }
422 #endif
423   B->preallocated = PETSC_TRUE;
424 
425   b->j      = j;
426   b->i      = i;
427   b->values = values;
428 
429   b->nz               = i[B->rmap->n];
430   b->diag             = 0;
431   b->symmetric        = PETSC_FALSE;
432   b->freeaij          = PETSC_TRUE;
433 
434   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
435   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
436   PetscFunctionReturn(0);
437 }
438 EXTERN_C_END
439 
440 /*MC
441    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
442    intended for use constructing orderings and partitionings.
443 
444   Level: beginner
445 
.seealso: MatCreateMPIAdj()
447 M*/
448 
449 EXTERN_C_BEGIN
450 #undef __FUNCT__
451 #define __FUNCT__ "MatCreate_MPIAdj"
452 PetscErrorCode  MatCreate_MPIAdj(Mat B)
453 {
454   Mat_MPIAdj     *b;
455   PetscErrorCode ierr;
456 
457   PetscFunctionBegin;
458   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
459   B->data             = (void*)b;
460   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
461   B->assembled        = PETSC_FALSE;
462 
463   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
464                                     "MatMPIAdjSetPreallocation_MPIAdj",
465                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
466   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
467   PetscFunctionReturn(0);
468 }
469 EXTERN_C_END
470 
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

   Logically Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Notes: the matrix takes ownership of i, j, and values; do not free them
   yourself, they are released when the matrix is destroyed.

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatches to MatMPIAdjSetPreallocation_MPIAdj() when B is MATMPIADJ;
     a no-op for other types (PetscTryMethod) */
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
497 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the ii, values and jj arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* convenience wrapper: create, size, type, then hand over the arrays */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
546 
547 
548 
549 
550 
551 
552 
553