xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 6849ba73f22fecb8f92ef896a42e4e8bd4cd6965)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode ierr;
13   int i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "MatSetOption_MPIAdj"
80 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   switch (op) {
86   case MAT_SYMMETRIC:
87   case MAT_STRUCTURALLY_SYMMETRIC:
88   case MAT_HERMITIAN:
89     a->symmetric = PETSC_TRUE;
90     break;
91   case MAT_NOT_SYMMETRIC:
92   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
93   case MAT_NOT_HERMITIAN:
94     a->symmetric = PETSC_FALSE;
95     break;
96   case MAT_SYMMETRY_ETERNAL:
97   case MAT_NOT_SYMMETRY_ETERNAL:
98     break;
99   default:
100     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
101     break;
102   }
103   PetscFunctionReturn(0);
104 }
105 
106 
107 /*
108      Adds diagonal pointers to sparse matrix structure.
109 */
110 
111 #undef __FUNCT__
112 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
113 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
114 {
115   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
116   PetscErrorCode ierr;
117   int        i,j,*diag,m = A->m;
118 
119   PetscFunctionBegin;
120   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
121   PetscLogObjectMemory(A,(m+1)*sizeof(int));
122   for (i=0; i<A->m; i++) {
123     for (j=a->i[i]; j<a->i[i+1]; j++) {
124       if (a->j[j] == i) {
125         diag[i] = j;
126         break;
127       }
128     }
129   }
130   a->diag = diag;
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "MatGetRow_MPIAdj"
136 PetscErrorCode MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
137 {
138   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
139   int        *itmp;
140 
141   PetscFunctionBegin;
142   row -= a->rstart;
143 
144   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
145 
146   *nz = a->i[row+1] - a->i[row];
147   if (v) *v = PETSC_NULL;
148   if (idx) {
149     itmp = a->j + a->i[row];
150     if (*nz) {
151       *idx = itmp;
152     }
153     else *idx = 0;
154   }
155   PetscFunctionReturn(0);
156 }
157 
158 #undef __FUNCT__
159 #define __FUNCT__ "MatRestoreRow_MPIAdj"
160 PetscErrorCode MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
161 {
162   PetscFunctionBegin;
163   PetscFunctionReturn(0);
164 }
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "MatGetBlockSize_MPIAdj"
168 PetscErrorCode MatGetBlockSize_MPIAdj(Mat A,int *bs)
169 {
170   PetscFunctionBegin;
171   *bs = 1;
172   PetscFunctionReturn(0);
173 }
174 
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatEqual_MPIAdj"
178 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
179 {
180   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
181   PetscErrorCode ierr;
182   PetscTruth  flag;
183 
184   PetscFunctionBegin;
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
   MatGetRowIJ_MPIAdj - Exposes the internal CSR arrays (i,j) of the matrix.
   Sequential only: on more than one process *done is set to PETSC_FALSE and
   nothing else is returned.  When oshift is nonzero the arrays are shifted
   IN PLACE to 1-based indexing; MatRestoreRowIJ_MPIAdj must undo this.
   The `symmetric` argument is ignored (adjacency structure is returned as-is).
*/
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  int size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: (*ia)[*m] must still hold the UNSHIFTED nonzero count
       while it is used as the loop bound */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
223 
224 #undef __FUNCT__
225 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
   MatRestoreRowIJ_MPIAdj - Companion to MatGetRowIJ_MPIAdj: verifies the
   caller passed back the same internal arrays and, if they were shifted to
   1-based indexing, shifts them back to 0-based in place.
*/
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* un-shift ia first so (*ia)[*m] is restored to the true nonzero count
       before it is used as the ja loop bound (mirror of MatGetRowIJ order) */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
242 
243 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Only the operations meaningful for a
   structure-only adjacency matrix are filled in (row access, equality,
   options, block size, row-IJ access, destroy, view, maps); every other
   slot is 0, so unsupported operations fail cleanly in the Mat dispatch
   layer.  The numeric comments are slot indices into struct _MatOps --
   do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
/*85*/ 0
};
330 
331 EXTERN_C_BEGIN
332 #undef __FUNCT__
333 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
334 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
335 {
336   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
337   PetscErrorCode ierr;
338 #if defined(PETSC_USE_BOPT_g)
339   int        ii;
340 #endif
341 
342   PetscFunctionBegin;
343   B->preallocated = PETSC_TRUE;
344 #if defined(PETSC_USE_BOPT_g)
345   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
346   for (ii=1; ii<B->m; ii++) {
347     if (i[ii] < 0 || i[ii] < i[ii-1]) {
348       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
349     }
350   }
351   for (ii=0; ii<i[B->m]; ii++) {
352     if (j[ii] < 0 || j[ii] >= B->N) {
353       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
354     }
355   }
356 #endif
357 
358   b->j      = j;
359   b->i      = i;
360   b->values = values;
361 
362   b->nz               = i[B->m];
363   b->diag             = 0;
364   b->symmetric        = PETSC_FALSE;
365   b->freeaij          = PETSC_TRUE;
366 
367   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
368   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
369   PetscFunctionReturn(0);
370 }
371 EXTERN_C_END
372 
373 /*MC
374    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
375    intended for use constructing orderings and partitionings.
376 
377   Level: beginner
378 
379 .seealso: MatCreateMPIAdj
380 M*/
381 
382 EXTERN_C_BEGIN
383 #undef __FUNCT__
384 #define __FUNCT__ "MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor for MATMPIADJ: installs the function
   table, computes global sizes and the row-ownership ranges, and registers
   the preallocation routine.  The actual CSR arrays arrive later via
   MatMPIAdjSetPreallocation().
*/
PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  PetscErrorCode ierr;
  int        ii,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p]..rowners[p+1] is the global row range owned by process p:
     gather every process's local row count, then prefix-sum in place */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  /* make the preallocation routine discoverable via PetscObjectQueryFunction */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
427 EXTERN_C_END
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "MatMPIAdjSetPreallocation"
431 /*@C
   MatMPIAdjSetPreallocation - Sets the arrays that define the matrix structure (row pointers, column indices, and optional edge weights)
433 
434    Collective on MPI_Comm
435 
436    Input Parameters:
437 +  A - the matrix
438 .  i - the indices into j for the start of each row
439 .  j - the column indices for each row (sorted for each row).
440        The indices in i and j start with zero (NOT with one).
441 -  values - [optional] edge weights
442 
443    Level: intermediate
444 
445 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
446 @*/
447 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
448 {
449   PetscErrorCode ierr,(*f)(Mat,int*,int*,int*);
450 
451   PetscFunctionBegin;
452   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
453   if (f) {
454     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
455   }
456   PetscFunctionReturn(0);
457 }
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "MatCreateMPIAdj"
461 /*@C
462    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
463    The matrix does not have numerical values associated with it, but is
464    intended for ordering (to reduce bandwidth etc) and partitioning.
465 
466    Collective on MPI_Comm
467 
468    Input Parameters:
469 +  comm - MPI communicator
470 .  m - number of local rows
471 .  n - number of columns
472 .  i - the indices into j for the start of each row
473 .  j - the column indices for each row (sorted for each row).
474        The indices in i and j start with zero (NOT with one).
475 -  values -[optional] edge weights
476 
477    Output Parameter:
478 .  A - the matrix
479 
480    Level: intermediate
481 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.
488 
489    If you already have a matrix, you can create its adjacency matrix by a call
490    to MatConvert, specifying a type of MATMPIADJ.
491 
492    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
493 
494 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
495 @*/
496 PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
497 {
498   PetscErrorCode ierr;
499 
500   PetscFunctionBegin;
501   ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
502   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
503   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
504   PetscFunctionReturn(0);
505 }
506 
507 EXTERN_C_BEGIN
508 #undef __FUNCT__
509 #define __FUNCT__ "MatConvertTo_MPIAdj"
510 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
511 {
512   Mat               B;
513   PetscErrorCode ierr;
514   int               i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
515   const int         *rj;
516   const PetscScalar *ra;
517   MPI_Comm          comm;
518 
519   PetscFunctionBegin;
520   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
521   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
522   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
523 
524   /* count the number of nonzeros per row */
525   for (i=0; i<m; i++) {
526     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
527     for (j=0; j<len; j++) {
528       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
529     }
530     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
531     nzeros += len;
532   }
533 
534   /* malloc space for nonzeros */
535   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
536   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
537   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
538 
539   nzeros = 0;
540   ia[0]  = 0;
541   for (i=0; i<m; i++) {
542     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
543     cnt     = 0;
544     for (j=0; j<len; j++) {
545       if (rj[j] != i+rstart) { /* if not diagonal */
546         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
547         ja[nzeros+cnt++] = rj[j];
548       }
549     }
550     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
551     nzeros += cnt;
552     ia[i+1] = nzeros;
553   }
554 
555   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
556   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
557   ierr = MatSetType(B,type);CHKERRQ(ierr);
558   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
559 
560   /* Fake support for "inplace" convert. */
561   if (*newmat == A) {
562     ierr = MatDestroy(A);CHKERRQ(ierr);
563   }
564   *newmat = B;
565   PetscFunctionReturn(0);
566 }
567 EXTERN_C_END
568 
569 
570 
571 
572 
573