xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include "../src/mat/impls/adj/mpi/mpiadj.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscBool      iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
66 #endif
67   ierr = PetscFree(a->diag);CHKERRQ(ierr);
68   if (a->freeaij) {
69     if (a->freeaijwithfree) {
70       if (a->i) free(a->i);
71       if (a->j) free(a->j);
72     } else {
73       ierr = PetscFree(a->i);CHKERRQ(ierr);
74       ierr = PetscFree(a->j);CHKERRQ(ierr);
75       ierr = PetscFree(a->values);CHKERRQ(ierr);
76     }
77   }
78   ierr = PetscFree(a);CHKERRQ(ierr);
79   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
80   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
81   PetscFunctionReturn(0);
82 }
83 
84 #undef __FUNCT__
85 #define __FUNCT__ "MatSetOption_MPIAdj"
86 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
87 {
88   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
89   PetscErrorCode ierr;
90 
91   PetscFunctionBegin;
92   switch (op) {
93   case MAT_SYMMETRIC:
94   case MAT_STRUCTURALLY_SYMMETRIC:
95   case MAT_HERMITIAN:
96     a->symmetric = flg;
97     break;
98   case MAT_SYMMETRY_ETERNAL:
99     break;
100   default:
101     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
102     break;
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNCT__
113 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
114 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
117   PetscErrorCode ierr;
118   PetscInt       i,j,m = A->rmap->n;
119 
120   PetscFunctionBegin;
121   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
122   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
123   for (i=0; i<A->rmap->n; i++) {
124     for (j=a->i[i]; j<a->i[i+1]; j++) {
125       if (a->j[j] == i) {
126         a->diag[i] = j;
127         break;
128       }
129     }
130   }
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "MatGetRow_MPIAdj"
136 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
137 {
138   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
139   PetscInt   *itmp;
140 
141   PetscFunctionBegin;
142   row -= A->rmap->rstart;
143 
144   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
145 
146   *nz = a->i[row+1] - a->i[row];
147   if (v) *v = PETSC_NULL;
148   if (idx) {
149     itmp = a->j + a->i[row];
150     if (*nz) {
151       *idx = itmp;
152     }
153     else *idx = 0;
154   }
155   PetscFunctionReturn(0);
156 }
157 
158 #undef __FUNCT__
159 #define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
  Companion to MatGetRow_MPIAdj().  That routine hands back pointers
  directly into the matrix's own i/j storage, so there is nothing to copy
  back or free here; this is intentionally a no-op.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "MatEqual_MPIAdj"
168 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
169 {
170   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
171   PetscErrorCode ierr;
172   PetscBool      flag;
173 
174   PetscFunctionBegin;
175   /* If the  matrix dimensions are not equal,or no of nonzeros */
176   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
177     flag = PETSC_FALSE;
178   }
179 
180   /* if the a->i are the same */
181   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
182 
183   /* if a->j are the same */
184   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
190 #undef __FUNCT__
191 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
192 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
193 {
194   PetscInt       i;
195   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
196 
197   PetscFunctionBegin;
198   *m    = A->rmap->n;
199   *ia   = a->i;
200   *ja   = a->j;
201   *done = PETSC_TRUE;
202   if (oshift) {
203     for (i=0; i<(*ia)[*m]; i++) {
204       (*ja)[i]++;
205     }
206     for (i=0; i<=(*m); i++) (*ia)[i]++;
207   }
208   PetscFunctionReturn(0);
209 }
210 
211 #undef __FUNCT__
212 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
213 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
214 {
215   PetscInt   i;
216   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
217 
218   PetscFunctionBegin;
219   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
220   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
221   if (oshift) {
222     for (i=0; i<=(*m); i++) (*ia)[i]--;
223     for (i=0; i<(*ia)[*m]; i++) {
224       (*ja)[i]--;
225     }
226   }
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "MatConvertFrom_MPIAdj"
232 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
233 {
234   Mat               B;
235   PetscErrorCode    ierr;
236   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
237   const PetscInt    *rj;
238   const PetscScalar *ra;
239   MPI_Comm          comm;
240 
241   PetscFunctionBegin;
242   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
243   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
244   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
245 
246   /* count the number of nonzeros per row */
247   for (i=0; i<m; i++) {
248     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
249     for (j=0; j<len; j++) {
250       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
251     }
252     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
253     nzeros += len;
254   }
255 
256   /* malloc space for nonzeros */
257   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
258   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
259   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
260 
261   nzeros = 0;
262   ia[0]  = 0;
263   for (i=0; i<m; i++) {
264     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
265     cnt     = 0;
266     for (j=0; j<len; j++) {
267       if (rj[j] != i+rstart) { /* if not diagonal */
268         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
269         ja[nzeros+cnt++] = rj[j];
270       }
271     }
272     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
273     nzeros += cnt;
274     ia[i+1] = nzeros;
275   }
276 
277   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
278   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
279   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
280   ierr = MatSetType(B,type);CHKERRQ(ierr);
281   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
282 
283   if (reuse == MAT_REUSE_MATRIX) {
284     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
285   } else {
286     *newmat = B;
287   }
288   PetscFunctionReturn(0);
289 }
290 
291 /* -------------------------------------------------------------------*/
/*
  Function table for MATMPIADJ.  Only the operations meaningful for a pure
  adjacency structure are provided (row access, equality, options, row-IJ
  access, destroy, view, convert-from); all other slots are 0 and the
  corresponding MatXXX() calls will fail for this matrix type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
390 
391 EXTERN_C_BEGIN
392 #undef __FUNCT__
393 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
394 PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
395 {
396   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
397   PetscErrorCode ierr;
398 #if defined(PETSC_USE_DEBUG)
399   PetscInt       ii;
400 #endif
401 
402   PetscFunctionBegin;
403   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
404   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
405   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
406   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
407 
408 #if defined(PETSC_USE_DEBUG)
409   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
410   for (ii=1; ii<B->rmap->n; ii++) {
411     if (i[ii] < 0 || i[ii] < i[ii-1]) {
412       SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
413     }
414   }
415   for (ii=0; ii<i[B->rmap->n]; ii++) {
416     if (j[ii] < 0 || j[ii] >= B->cmap->N) {
417       SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
418     }
419   }
420 #endif
421   B->preallocated = PETSC_TRUE;
422 
423   b->j      = j;
424   b->i      = i;
425   b->values = values;
426 
427   b->nz               = i[B->rmap->n];
428   b->diag             = 0;
429   b->symmetric        = PETSC_FALSE;
430   b->freeaij          = PETSC_TRUE;
431 
432   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
433   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
434   PetscFunctionReturn(0);
435 }
436 EXTERN_C_END
437 
438 /*MC
439    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
440    intended for use constructing orderings and partitionings.
441 
442   Level: beginner
443 
.seealso: MatCreateMPIAdj()
445 M*/
446 
447 EXTERN_C_BEGIN
448 #undef __FUNCT__
449 #define __FUNCT__ "MatCreate_MPIAdj"
450 PetscErrorCode  MatCreate_MPIAdj(Mat B)
451 {
452   Mat_MPIAdj     *b;
453   PetscErrorCode ierr;
454 
455   PetscFunctionBegin;
456   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
457   B->data             = (void*)b;
458   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
459   B->assembled        = PETSC_FALSE;
460 
461   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
462                                     "MatMPIAdjSetPreallocation_MPIAdj",
463                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
464   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
465   PetscFunctionReturn(0);
466 }
467 EXTERN_C_END
468 
469 #undef __FUNCT__
470 #define __FUNCT__ "MatMPIAdjSetPreallocation"
471 /*@C
472    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
473 
474    Logically Collective on MPI_Comm
475 
476    Input Parameters:
477 +  A - the matrix
478 .  i - the indices into j for the start of each row
479 .  j - the column indices for each row (sorted for each row).
480        The indices in i and j start with zero (NOT with one).
481 -  values - [optional] edge weights
482 
483    Level: intermediate
484 
485 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
486 @*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* dispatch to the type-specific implementation; silently a no-op for other matrix types */
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatCreateMPIAdj"
498 /*@C
499    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
500    The matrix does not have numerical values associated with it, but is
501    intended for ordering (to reduce bandwidth etc) and partitioning.
502 
503    Collective on MPI_Comm
504 
505    Input Parameters:
506 +  comm - MPI communicator
507 .  m - number of local rows
508 .  N - number of global columns
509 .  i - the indices into j for the start of each row
510 .  j - the column indices for each row (sorted for each row).
511        The indices in i and j start with zero (NOT with one).
512 -  values -[optional] edge weights
513 
514    Output Parameter:
515 .  A - the matrix
516 
517    Level: intermediate
518 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
521    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
522    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
523     call from Fortran you need not create the arrays with PetscMalloc().
524    Should not include the matrix diagonals.
525 
526    If you already have a matrix, you can create its adjacency matrix by a call
527    to MatConvert, specifying a type of MATMPIADJ.
528 
529    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
530 
531 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
532 @*/
533 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
534 {
535   PetscErrorCode ierr;
536 
537   PetscFunctionBegin;
538   ierr = MatCreate(comm,A);CHKERRQ(ierr);
539   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
540   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
541   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
542   PetscFunctionReturn(0);
543 }
544 
545 
546 
547 
548 
549 
550 
551