xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision e005ede52eafe2fed2813abb7a7eb3df04d5f886)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode ierr;
13   int i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
/*
    Frees all storage owned by the MPIAdj matrix: the optional diagonal
    pointer array, the adjacency arrays i/j/values (only when this matrix
    owns them, i.e. a->freeaij is set), and the row-ownership table.
*/
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    /* these arrays were handed to us by MatMPIAdjSetPreallocation_MPIAdj() */
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  /* clear the composed preallocation function now that the data it fills is gone */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "MatSetOption_MPIAdj"
82 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
83 {
84   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
85 
86   PetscFunctionBegin;
87   switch (op) {
88   case MAT_SYMMETRIC:
89   case MAT_STRUCTURALLY_SYMMETRIC:
90   case MAT_HERMITIAN:
91     a->symmetric = PETSC_TRUE;
92     break;
93   case MAT_NOT_SYMMETRIC:
94   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
95   case MAT_NOT_HERMITIAN:
96     a->symmetric = PETSC_FALSE;
97     break;
98   case MAT_SYMMETRY_ETERNAL:
99   case MAT_NOT_SYMMETRY_ETERNAL:
100     break;
101   default:
102     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103     break;
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
115 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
118   PetscErrorCode ierr;
119   int        i,j,*diag,m = A->m;
120 
121   PetscFunctionBegin;
122   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
123   PetscLogObjectMemory(A,(m+1)*sizeof(int));
124   for (i=0; i<A->m; i++) {
125     for (j=a->i[i]; j<a->i[i+1]; j++) {
126       if (a->j[j] == i) {
127         diag[i] = j;
128         break;
129       }
130     }
131   }
132   a->diag = diag;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "MatGetRow_MPIAdj"
138 PetscErrorCode MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141   int        *itmp;
142 
143   PetscFunctionBegin;
144   row -= a->rstart;
145 
146   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147 
148   *nz = a->i[row+1] - a->i[row];
149   if (v) *v = PETSC_NULL;
150   if (idx) {
151     itmp = a->j + a->i[row];
152     if (*nz) {
153       *idx = itmp;
154     }
155     else *idx = 0;
156   }
157   PetscFunctionReturn(0);
158 }
159 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
    Nothing to do: MatGetRow_MPIAdj() returns pointers into the matrix's
    own storage, so there is nothing to free or copy back.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
167 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/* Adjacency matrices are always stored with block size 1 */
PetscErrorCode MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
176 
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "MatEqual_MPIAdj"
180 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
181 {
182   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
183   PetscErrorCode ierr;
184   PetscTruth  flag;
185 
186   PetscFunctionBegin;
187   /* If the  matrix dimensions are not equal,or no of nonzeros */
188   if ((A->m != B->m) ||(a->nz != b->nz)) {
189     flag = PETSC_FALSE;
190   }
191 
192   /* if the a->i are the same */
193   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
194 
195   /* if a->j are the same */
196   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
197 
198   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
199   PetscFunctionReturn(0);
200 }
201 
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
    Hands back pointers to the internal i/j arrays (sequential runs only;
    on more than one process *done is set to PETSC_FALSE).  When oshift is
    nonzero the arrays are shifted IN PLACE to 1-based indexing;
    MatRestoreRowIJ_MPIAdj() undoes the shift.
*/
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  int size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: its length (*ia)[*m] must be read BEFORE ia is shifted */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
225 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
    Undoes the optional 1-based shift applied by MatGetRowIJ_MPIAdj(),
    after verifying the caller passed back the same arrays it was given.
*/
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* unshift ia first so (*ia)[*m] is again the true nonzero count used as the ja loop bound */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
244 
/* -------------------------------------------------------------------*/
/*
    Positional function table for MATMPIADJ.  Each slot corresponds to a
    fixed operation index in struct _MatOps (the slash-comments mark every
    fifth slot); a 0 entry means the operation is not supported by this
    matrix type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
345 
346 EXTERN_C_BEGIN
347 #undef __FUNCT__
348 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
349 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
350 {
351   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
352   PetscErrorCode ierr;
353 #if defined(PETSC_USE_BOPT_g)
354   int        ii;
355 #endif
356 
357   PetscFunctionBegin;
358   B->preallocated = PETSC_TRUE;
359 #if defined(PETSC_USE_BOPT_g)
360   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
361   for (ii=1; ii<B->m; ii++) {
362     if (i[ii] < 0 || i[ii] < i[ii-1]) {
363       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
364     }
365   }
366   for (ii=0; ii<i[B->m]; ii++) {
367     if (j[ii] < 0 || j[ii] >= B->N) {
368       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
369     }
370   }
371 #endif
372 
373   b->j      = j;
374   b->i      = i;
375   b->values = values;
376 
377   b->nz               = i[B->m];
378   b->diag             = 0;
379   b->symmetric        = PETSC_FALSE;
380   b->freeaij          = PETSC_TRUE;
381 
382   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
383   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 EXTERN_C_END
387 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
396 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
    Constructor for MATMPIADJ: installs the function table, determines the
    global row count, and builds the row-ownership table across the
    communicator.  The i/j arrays themselves are supplied later through
    MatMPIAdjSetPreallocation().
*/
PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  PetscErrorCode ierr;
  int        ii,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts; columns are not distributed */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p] = first global row owned by process p (prefix sum of the gathered local sizes) */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* starting at ii=2 is correct: rowners[1] += rowners[0] would add zero */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatMPIAdjSetPreallocation"
446 /*@C
447    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
448 
449    Collective on MPI_Comm
450 
451    Input Parameters:
452 +  A - the matrix
453 .  i - the indices into j for the start of each row
454 .  j - the column indices for each row (sorted for each row).
455        The indices in i and j start with zero (NOT with one).
456 -  values - [optional] edge weights
457 
458    Level: intermediate
459 
460 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
461 @*/
462 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
463 {
464   PetscErrorCode ierr,(*f)(Mat,int*,int*,int*);
465 
466   PetscFunctionBegin;
467   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
468   if (f) {
469     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
470   }
471   PetscFunctionReturn(0);
472 }
473 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* create the shell, set its type, then hand ownership of the adjacency arrays to it */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
521 
522 EXTERN_C_BEGIN
523 #undef __FUNCT__
524 #define __FUNCT__ "MatConvertTo_MPIAdj"
525 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
526 {
527   Mat               B;
528   PetscErrorCode ierr;
529   int               i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
530   const int         *rj;
531   const PetscScalar *ra;
532   MPI_Comm          comm;
533 
534   PetscFunctionBegin;
535   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
536   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
537   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
538 
539   /* count the number of nonzeros per row */
540   for (i=0; i<m; i++) {
541     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
542     for (j=0; j<len; j++) {
543       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
544     }
545     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
546     nzeros += len;
547   }
548 
549   /* malloc space for nonzeros */
550   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
551   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
552   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
553 
554   nzeros = 0;
555   ia[0]  = 0;
556   for (i=0; i<m; i++) {
557     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
558     cnt     = 0;
559     for (j=0; j<len; j++) {
560       if (rj[j] != i+rstart) { /* if not diagonal */
561         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
562         ja[nzeros+cnt++] = rj[j];
563       }
564     }
565     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
566     nzeros += cnt;
567     ia[i+1] = nzeros;
568   }
569 
570   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
571   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
572   ierr = MatSetType(B,type);CHKERRQ(ierr);
573   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
574 
575   /* Fake support for "inplace" convert. */
576   if (*newmat == A) {
577     ierr = MatDestroy(A);CHKERRQ(ierr);
578   }
579   *newmat = B;
580   PetscFunctionReturn(0);
581 }
582 EXTERN_C_END
583 
584 
585 
586 
587 
588