xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 321af1c700e5f4ecf330ccd829080dfec8601628)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode ierr;
13   int i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "MatSetOption_MPIAdj"
80 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   switch (op) {
86   case MAT_SYMMETRIC:
87   case MAT_STRUCTURALLY_SYMMETRIC:
88   case MAT_HERMITIAN:
89     a->symmetric = PETSC_TRUE;
90     break;
91   case MAT_NOT_SYMMETRIC:
92   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
93   case MAT_NOT_HERMITIAN:
94     a->symmetric = PETSC_FALSE;
95     break;
96   case MAT_SYMMETRY_ETERNAL:
97   case MAT_NOT_SYMMETRY_ETERNAL:
98     break;
99   default:
100     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
101     break;
102   }
103   PetscFunctionReturn(0);
104 }
105 
106 
107 /*
108      Adds diagonal pointers to sparse matrix structure.
109 */
110 
111 #undef __FUNCT__
112 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
113 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
114 {
115   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
116   PetscErrorCode ierr;
117   int        i,j,*diag,m = A->m;
118 
119   PetscFunctionBegin;
120   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
121   PetscLogObjectMemory(A,(m+1)*sizeof(int));
122   for (i=0; i<A->m; i++) {
123     for (j=a->i[i]; j<a->i[i+1]; j++) {
124       if (a->j[j] == i) {
125         diag[i] = j;
126         break;
127       }
128     }
129   }
130   a->diag = diag;
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "MatGetRow_MPIAdj"
136 PetscErrorCode MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
137 {
138   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
139   int        *itmp;
140 
141   PetscFunctionBegin;
142   row -= a->rstart;
143 
144   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
145 
146   *nz = a->i[row+1] - a->i[row];
147   if (v) *v = PETSC_NULL;
148   if (idx) {
149     itmp = a->j + a->i[row];
150     if (*nz) {
151       *idx = itmp;
152     }
153     else *idx = 0;
154   }
155   PetscFunctionReturn(0);
156 }
157 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Intentional no-op: MatGetRow_MPIAdj hands back
   pointers into the matrix's own storage, so there is nothing to free.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
165 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices are always unblocked, so the
   block size is fixed at 1.
*/
PetscErrorCode MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
174 
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatEqual_MPIAdj"
178 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
179 {
180   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
181   PetscErrorCode ierr;
182   PetscTruth  flag;
183 
184   PetscFunctionBegin;
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
202 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
203 {
204   PetscErrorCode ierr;
205   int size,i;
206   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
207 
208   PetscFunctionBegin;
209   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
210   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
211   *m    = A->m;
212   *ia   = a->i;
213   *ja   = a->j;
214   *done = PETSC_TRUE;
215   if (oshift) {
216     for (i=0; i<(*ia)[*m]; i++) {
217       (*ja)[i]++;
218     }
219     for (i=0; i<=(*m); i++) (*ia)[i]++;
220   }
221   PetscFunctionReturn(0);
222 }
223 
224 #undef __FUNCT__
225 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
226 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
227 {
228   int        i;
229   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
230 
231   PetscFunctionBegin;
232   if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
233   if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
234   if (oshift) {
235     for (i=0; i<=(*m); i++) (*ia)[i]--;
236     for (i=0; i<(*ia)[*m]; i++) {
237       (*ja)[i]--;
238     }
239   }
240   PetscFunctionReturn(0);
241 }
242 
/* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ. The slots are positional (see struct _MatOps);
   the numbered comments mark every fifth slot. Only the handful of operations
   meaningful for an adjacency-structure matrix are implemented -- there are no
   numerical values, so MatMult, factorizations, etc. remain 0 (unsupported).
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
343 
344 EXTERN_C_BEGIN
345 #undef __FUNCT__
346 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
347 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
348 {
349   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
350   PetscErrorCode ierr;
351 #if defined(PETSC_USE_BOPT_g)
352   int        ii;
353 #endif
354 
355   PetscFunctionBegin;
356   B->preallocated = PETSC_TRUE;
357 #if defined(PETSC_USE_BOPT_g)
358   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
359   for (ii=1; ii<B->m; ii++) {
360     if (i[ii] < 0 || i[ii] < i[ii-1]) {
361       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
362     }
363   }
364   for (ii=0; ii<i[B->m]; ii++) {
365     if (j[ii] < 0 || j[ii] >= B->N) {
366       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
367     }
368   }
369 #endif
370 
371   b->j      = j;
372   b->i      = i;
373   b->values = values;
374 
375   b->nz               = i[B->m];
376   b->diag             = 0;
377   b->symmetric        = PETSC_FALSE;
378   b->freeaij          = PETSC_TRUE;
379 
380   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
381   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
382   PetscFunctionReturn(0);
383 }
384 EXTERN_C_END
385 
386 /*MC
387    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
388    intended for use constructing orderings and partitionings.
389 
390   Level: beginner
391 
.seealso: MatCreateMPIAdj()
393 M*/
394 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor for MATMPIADJ: installs the function
   table, computes global sizes, and builds the row-ownership table.

   Collective: performs MPI_Allreduce and MPI_Allgather on B->comm, so all
   processes must call it together (the normal MatSetType() path).
*/
PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  PetscErrorCode ierr;
  int        ii,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count = sum of local row counts; columns are global already */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p]..rowners[p+1] is the global row range owned by process p:
     gather every process's local row count, then prefix-sum in place */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* starting at ii=2 is correct: rowners[1] += rowners[0] would add zero */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  /* register the type-specific preallocation routine queried by MatMPIAdjSetPreallocation() */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "MatMPIAdjSetPreallocation"
444 /*@C
445    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
446 
447    Collective on MPI_Comm
448 
449    Input Parameters:
450 +  A - the matrix
451 .  i - the indices into j for the start of each row
452 .  j - the column indices for each row (sorted for each row).
453        The indices in i and j start with zero (NOT with one).
454 -  values - [optional] edge weights
455 
456    Level: intermediate
457 
458 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
459 @*/
460 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
461 {
462   PetscErrorCode ierr,(*f)(Mat,int*,int*,int*);
463 
464   PetscFunctionBegin;
465   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
466   if (f) {
467     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
468   }
469   PetscFunctionReturn(0);
470 }
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "MatCreateMPIAdj"
474 /*@C
475    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
476    The matrix does not have numerical values associated with it, but is
477    intended for ordering (to reduce bandwidth etc) and partitioning.
478 
479    Collective on MPI_Comm
480 
481    Input Parameters:
482 +  comm - MPI communicator
483 .  m - number of local rows
484 .  n - number of columns
485 .  i - the indices into j for the start of each row
486 .  j - the column indices for each row (sorted for each row).
487        The indices in i and j start with zero (NOT with one).
488 -  values -[optional] edge weights
489 
490    Output Parameter:
491 .  A - the matrix
492 
493    Level: intermediate
494 
495    Notes: This matrix object does not support most matrix operations, include
496    MatSetValues().
497    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
498    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
499     call from Fortran you need not create the arrays with PetscMalloc().
500    Should not include the matrix diagonals.
501 
502    If you already have a matrix, you can create its adjacency matrix by a call
503    to MatConvert, specifying a type of MATMPIADJ.
504 
505    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
506 
507 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
508 @*/
509 PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
510 {
511   PetscErrorCode ierr;
512 
513   PetscFunctionBegin;
514   ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
515   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
516   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
517   PetscFunctionReturn(0);
518 }
519 
520 EXTERN_C_BEGIN
521 #undef __FUNCT__
522 #define __FUNCT__ "MatConvertTo_MPIAdj"
523 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
524 {
525   Mat               B;
526   PetscErrorCode ierr;
527   int               i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
528   const int         *rj;
529   const PetscScalar *ra;
530   MPI_Comm          comm;
531 
532   PetscFunctionBegin;
533   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
534   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
535   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
536 
537   /* count the number of nonzeros per row */
538   for (i=0; i<m; i++) {
539     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
540     for (j=0; j<len; j++) {
541       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
542     }
543     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
544     nzeros += len;
545   }
546 
547   /* malloc space for nonzeros */
548   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
549   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
550   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
551 
552   nzeros = 0;
553   ia[0]  = 0;
554   for (i=0; i<m; i++) {
555     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
556     cnt     = 0;
557     for (j=0; j<len; j++) {
558       if (rj[j] != i+rstart) { /* if not diagonal */
559         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
560         ja[nzeros+cnt++] = rj[j];
561       }
562     }
563     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
564     nzeros += cnt;
565     ia[i+1] = nzeros;
566   }
567 
568   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
569   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
570   ierr = MatSetType(B,type);CHKERRQ(ierr);
571   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
572 
573   /* Fake support for "inplace" convert. */
574   if (*newmat == A) {
575     ierr = MatDestroy(A);CHKERRQ(ierr);
576   }
577   *newmat = B;
578   PetscFunctionReturn(0);
579 }
580 EXTERN_C_END
581 
582 
583 
584 
585 
586