xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision a58c3bc3391eee32bc3fd94ac7edeea38fe57aae)
1 #define PETSCMAT_DLL
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   PetscErrorCode    ierr;
15   PetscInt          i,j,m = A->rmap.n;
16   const char        *name;
17   PetscViewerFormat format;
18 
19   PetscFunctionBegin;
20   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
21   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
22   if (format == PETSC_VIEWER_ASCII_INFO) {
23     PetscFunctionReturn(0);
24   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
25     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
26   } else {
27     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
28     for (i=0; i<m; i++) {
29       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);CHKERRQ(ierr);
30       for (j=a->i[i]; j<a->i[i+1]; j++) {
31         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
34     }
35     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
36   }
37   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscTruth     iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     ierr = PetscFree(a->i);CHKERRQ(ierr);
72     ierr = PetscFree(a->j);CHKERRQ(ierr);
73     ierr = PetscFree(a->values);CHKERRQ(ierr);
74   }
75   ierr = PetscFree(a);CHKERRQ(ierr);
76   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
77   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
78   PetscFunctionReturn(0);
79 }
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "MatSetOption_MPIAdj"
83 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
84 {
85   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
86   PetscErrorCode ierr;
87 
88   PetscFunctionBegin;
89   switch (op) {
90   case MAT_SYMMETRIC:
91   case MAT_STRUCTURALLY_SYMMETRIC:
92   case MAT_HERMITIAN:
93     a->symmetric = PETSC_TRUE;
94     break;
95   case MAT_NOT_SYMMETRIC:
96   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
97   case MAT_NOT_HERMITIAN:
98     a->symmetric = PETSC_FALSE;
99     break;
100   case MAT_SYMMETRY_ETERNAL:
101   case MAT_NOT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap.n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap.n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap.rstart;
146 
147   if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   Companion to MatGetRow_MPIAdj().  That routine hands out pointers into
   the matrix's own storage (no copies are made), so there is nothing to
   release here; all parameters are intentionally unused.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscTruth     flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->rmap.n != B->rmap.n) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
196 {
197   PetscErrorCode ierr;
198   PetscMPIInt    size;
199   PetscInt       i;
200   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
201 
202   PetscFunctionBegin;
203   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
204   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
205   *m    = A->rmap.n;
206   *ia   = a->i;
207   *ja   = a->j;
208   *done = PETSC_TRUE;
209   if (oshift) {
210     for (i=0; i<(*ia)[*m]; i++) {
211       (*ja)[i]++;
212     }
213     for (i=0; i<=(*m); i++) (*ia)[i]++;
214   }
215   PetscFunctionReturn(0);
216 }
217 
218 #undef __FUNCT__
219 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
220 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
221 {
222   PetscInt   i;
223   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
224 
225   PetscFunctionBegin;
226   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228   if (oshift) {
229     for (i=0; i<=(*m); i++) (*ia)[i]--;
230     for (i=0; i<(*ia)[*m]; i++) {
231       (*ja)[i]--;
232     }
233   }
234   PetscFunctionReturn(0);
235 }
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "MatConvertFrom_MPIAdj"
239 PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
240 {
241   Mat               B;
242   PetscErrorCode    ierr;
243   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
244   const PetscInt    *rj;
245   const PetscScalar *ra;
246   MPI_Comm          comm;
247 
248   PetscFunctionBegin;
249   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
250   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
251   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
252 
253   /* count the number of nonzeros per row */
254   for (i=0; i<m; i++) {
255     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
256     for (j=0; j<len; j++) {
257       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
258     }
259     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
260     nzeros += len;
261   }
262 
263   /* malloc space for nonzeros */
264   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
265   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
266   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
267 
268   nzeros = 0;
269   ia[0]  = 0;
270   for (i=0; i<m; i++) {
271     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
272     cnt     = 0;
273     for (j=0; j<len; j++) {
274       if (rj[j] != i+rstart) { /* if not diagonal */
275         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
276         ja[nzeros+cnt++] = rj[j];
277       }
278     }
279     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
280     nzeros += cnt;
281     ia[i+1] = nzeros;
282   }
283 
284   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
285   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
286   ierr = MatSetSizes(B,m,N,PETSC_DETERMINE,N);CHKERRQ(ierr);
287   ierr = MatSetType(B,type);CHKERRQ(ierr);
288   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
289 
290   if (reuse == MAT_REUSE_MATRIX) {
291     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
292   } else {
293     *newmat = B;
294   }
295   PetscFunctionReturn(0);
296 }
297 
/* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Most slots are 0 because this type only
   stores adjacency structure: the operations provided are row access,
   equality testing, option setting, row-IJ access (for ordering and
   partitioning), destroy, view, and conversion from other types.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
398 
399 EXTERN_C_BEGIN
400 #undef __FUNCT__
401 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
402 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
403 {
404   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
405   PetscErrorCode ierr;
406 #if defined(PETSC_USE_DEBUG)
407   PetscInt       ii;
408 #endif
409 
410   PetscFunctionBegin;
411   B->preallocated = PETSC_TRUE;
412 #if defined(PETSC_USE_DEBUG)
413   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
414   for (ii=1; ii<B->rmap.n; ii++) {
415     if (i[ii] < 0 || i[ii] < i[ii-1]) {
416       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
417     }
418   }
419   for (ii=0; ii<i[B->rmap.n]; ii++) {
420     if (j[ii] < 0 || j[ii] >= B->cmap.N) {
421       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
422     }
423   }
424 #endif
425 
426   b->j      = j;
427   b->i      = i;
428   b->values = values;
429 
430   b->nz               = i[B->rmap.n];
431   b->diag             = 0;
432   b->symmetric        = PETSC_FALSE;
433   b->freeaij          = PETSC_TRUE;
434 
435   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
436   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
437   PetscFunctionReturn(0);
438 }
439 EXTERN_C_END
440 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
449 
450 EXTERN_C_BEGIN
451 #undef __FUNCT__
452 #define __FUNCT__ "MatCreate_MPIAdj"
453 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
454 {
455   Mat_MPIAdj     *b;
456   PetscErrorCode ierr;
457   PetscMPIInt    size,rank;
458 
459   PetscFunctionBegin;
460   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
461   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
462 
463   ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
464   B->data             = (void*)b;
465   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
466   B->factor           = 0;
467   B->mapping          = 0;
468   B->assembled        = PETSC_FALSE;
469 
470   ierr = PetscMapSetBlockSize(&B->rmap,1);CHKERRQ(ierr);
471   ierr = PetscMapSetBlockSize(&B->cmap,1);CHKERRQ(ierr);
472   ierr = PetscMapSetUp(&B->rmap);CHKERRQ(ierr);
473   if (B->cmap.n  < 0) B->cmap.n = B->cmap.N;
474   if (B->cmap.N  < 0) B->cmap.N = B->cmap.n;
475 
476   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
477                                     "MatMPIAdjSetPreallocation_MPIAdj",
478                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
479   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
480   PetscFunctionReturn(0);
481 }
482 EXTERN_C_END
483 
484 #undef __FUNCT__
485 #define __FUNCT__ "MatMPIAdjSetPreallocation"
486 /*@C
487    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
488 
489    Collective on MPI_Comm
490 
491    Input Parameters:
492 +  A - the matrix
493 .  i - the indices into j for the start of each row
494 .  j - the column indices for each row (sorted for each row).
495        The indices in i and j start with zero (NOT with one).
496 -  values - [optional] edge weights
497 
498    Level: intermediate
499 
500 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
501 @*/
502 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
503 {
504   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
505 
506   PetscFunctionBegin;
507   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
508   if (f) {
509     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
510   }
511   PetscFunctionReturn(0);
512 }
513 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,n);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
562 
563 
564 
565 
566 
567 
568 
569