xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision e3caeda681d93b7b1d053090fe6dee7657faa56d)
#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include "../src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}
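
/*
   Example (an illustrative sketch, not part of the original source): an
   MPIADJ matrix is viewed like any other Mat, and only ASCII viewers are
   supported; A is assumed to be an assembled MATMPIADJ matrix.

     ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/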

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}
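
/*
   Example (an illustrative sketch): recording that an adjacency matrix is
   structurally symmetric, the one property this type tracks; A is assumed
   to be a MATMPIADJ matrix whose i/j structure really is symmetric.

     ierr = MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
*/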

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}
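
/*
   Illustrative note (added commentary, not from the original source): each
   process stores its rows in CSR form, with a->i[] holding row offsets and
   a->j[] holding column indices.  For the local 3-row adjacency

      row 0: 1 2
      row 1: 0
      row 2: 0 1

   the arrays are

      a->i = {0, 2, 3, 5}
      a->j = {1, 2, 0, 0, 1}

   so row r occupies a->j[a->i[r]] through a->j[a->i[r+1]-1], and a->diag[r],
   when set above, is the position in a->j of the diagonal entry of row r.
*/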

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = PETSC_NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  /* nothing to do: MatGetRow_MPIAdj() hands out pointers directly into the matrix storage */
  PetscFunctionReturn(0);
}
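
/*
   Example (an illustrative sketch): traversing the locally owned rows with
   MatGetRow()/MatRestoreRow(); A, rstart, and rend are assumed to come from
   the caller.

     PetscInt       r,rstart,rend,ncols;
     const PetscInt *cols;

     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (r=rstart; r<rend; r++) {
       ierr = MatGetRow(A,r,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
       ...                          cols[0..ncols-1] are the neighbors of vertex r
       ierr = MatRestoreRow(A,r,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
     }
*/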

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the local dimensions or numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise compare the row offsets a->i, then the column indices a->j */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  /* combine the local results across all processes */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
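
/*
   Example (an illustrative sketch): borrowing the raw CSR arrays with
   MatGetRowIJ(); oshift=1 requests temporarily shifted, 1-based indices
   (e.g. for Fortran-style packages).  A is assumed to live on a single
   process, since the routine above sets done = PETSC_FALSE otherwise.

     PetscInt   n,*ia,*ja;
     PetscTruth done;

     ierr = MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     if (done) {
       ...                          use ia[0..n] and ja[0..ia[n]-1]
       ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
     }
*/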

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
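
/*
   Example (an illustrative sketch): the usual way to reach the routine above
   is MatConvert(), which extracts the adjacency structure (minus the
   diagonal) of an existing assembled matrix A, e.g. before partitioning.

     Mat adj;

     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ...                          hand adj to MatPartitioningSetAdjacency(), etc.
     ierr = MatDestroy(adj);CHKERRQ(ierr);
*/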

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D cannot be negative or less than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range [0,%D)\n",ii,j[ii],B->cmap->N);
    }
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

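   Example:
   A sketch of building the CSR arrays for two local rows and handing them
   over; the arrays must be obtained with PetscMalloc() because the matrix
   takes ownership of them and frees them on destroy.
.vb
   PetscInt *ii,*jj;

   ierr = PetscMalloc(3*sizeof(PetscInt),&ii);CHKERRQ(ierr);
   ierr = PetscMalloc(3*sizeof(PetscInt),&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 2; ii[2] = 3;           row 0 has 2 entries, row 1 has 1
   jj[0] = 1; jj[1] = 2; jj[2] = 0;           column indices, sorted within each row
   ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);
.ve
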
.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. You must allocate them with PetscMalloc() (when
   calling from Fortran you need not create the arrays with PetscMalloc()).
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

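   Example:
   A sketch of creating the adjacency matrix of a 3-vertex triangle graph on
   one process (error checking abbreviated):
.vb
   PetscInt *ii,*jj;
   Mat      adj;

   ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
   ierr = PetscMalloc(6*sizeof(PetscInt),&jj);CHKERRQ(ierr);
   ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
   jj[0] = 1; jj[1] = 2; jj[2] = 0; jj[3] = 2; jj[4] = 0; jj[5] = 1;
   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,3,3,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
.ve
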
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}