#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include "../src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
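    /* tabs are disabled so that each local row prints as one synchronized line */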
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
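  /* record, for each local row, the position within a->j of its diagonal entry;
     note that rows containing no diagonal entry leave diag[i] unset */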
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the local dimensions or numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i match */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the column indices a->j match */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  /* the matrices are equal only if they compare equal on every process */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
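  /* shift the (shared, in-place) arrays to 1-based indexing when the caller requests it;
     MatRestoreRowIJ_MPIAdj() undoes the shift */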
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros; ia needs only one entry per local row plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

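  /* second pass: copy the off-diagonal column indices, using |a_ij| cast to
     PetscInt as the edge weight */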
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
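/* Most slots in the function table below are left 0: MATMPIADJ stores only the
   adjacency structure and supports just the handful of operations needed for
   ordering and partitioning. */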
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D cannot be negative or smaller than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range",ii,j[ii]);
    }
  }
#endif
  B->preallocated = PETSC_TRUE;

  /* the matrix takes ownership of the caller's arrays; they are freed in MatDestroy_MPIAdj() */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->rmap->n];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the nonzero structure
   (and optional edge weights) of the matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

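   Example:
   A minimal sketch (variable names are illustrative only) for a 3-vertex path graph
   0-1-2 on a single process, assuming B was created with 3 local rows and 3 global
   columns; the arrays must come from PetscMalloc() because the matrix takes
   ownership of them:
.vb
   PetscInt *ii,*jj;
   ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
   ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;  /* rows have 1, 2, and 1 entries */
   jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1;  /* neighbors; diagonal omitted */
   ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);
.ve
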
   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

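   Example:
   A sketch (assuming an existing assembled matrix C on communicator comm; names are
   illustrative) of converting a matrix to its adjacency structure for partitioning:
.vb
   Mat             adj;
   MatPartitioning part;
   ierr = MatConvert(C,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
   ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
   ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
.ve
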
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}