xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 2cfcea2917e9827eeaf4fa9c2e8dd75b054ced7b)
1 #define PETSCMAT_DLL
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   PetscErrorCode    ierr;
15   PetscInt          i,j,m = A->rmap.n;
16   const char        *name;
17   PetscViewerFormat format;
18 
19   PetscFunctionBegin;
20   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
21   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
22   if (format == PETSC_VIEWER_ASCII_INFO) {
23     PetscFunctionReturn(0);
24   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
25     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
26   } else {
27     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
28     for (i=0; i<m; i++) {
29       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);CHKERRQ(ierr);
30       for (j=a->i[i]; j<a->i[i+1]; j++) {
31         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
34     }
35     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
36   }
37   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscTruth     iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNCT__
59 #define __FUNCT__ "MatDestroy_MPIAdj"
60 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61 {
62   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
63   PetscErrorCode ierr;
64 
65   PetscFunctionBegin;
66 #if defined(PETSC_USE_LOG)
67   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
68 #endif
69   ierr = PetscFree(a->diag);CHKERRQ(ierr);
70   if (a->freeaij) {
71     ierr = PetscFree(a->i);CHKERRQ(ierr);
72     ierr = PetscFree(a->j);CHKERRQ(ierr);
73     ierr = PetscFree(a->values);CHKERRQ(ierr);
74   }
75   ierr = PetscFree(a);CHKERRQ(ierr);
76 
77   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
78   PetscFunctionReturn(0);
79 }
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "MatSetOption_MPIAdj"
83 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
84 {
85   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
86   PetscErrorCode ierr;
87 
88   PetscFunctionBegin;
89   switch (op) {
90   case MAT_SYMMETRIC:
91   case MAT_STRUCTURALLY_SYMMETRIC:
92   case MAT_HERMITIAN:
93     a->symmetric = PETSC_TRUE;
94     break;
95   case MAT_NOT_SYMMETRIC:
96   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
97   case MAT_NOT_HERMITIAN:
98     a->symmetric = PETSC_FALSE;
99     break;
100   case MAT_SYMMETRY_ETERNAL:
101   case MAT_NOT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo(A,"Option ignored\n");CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,*diag,m = A->rmap.n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap.n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         diag[i] = j;
130         break;
131       }
132     }
133   }
134   a->diag = diag;
135   PetscFunctionReturn(0);
136 }
137 
138 #undef __FUNCT__
139 #define __FUNCT__ "MatGetRow_MPIAdj"
140 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
141 {
142   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
143   PetscInt   *itmp;
144 
145   PetscFunctionBegin;
146   row -= A->rmap.rstart;
147 
148   if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
149 
150   *nz = a->i[row+1] - a->i[row];
151   if (v) *v = PETSC_NULL;
152   if (idx) {
153     itmp = a->j + a->i[row];
154     if (*nz) {
155       *idx = itmp;
156     }
157     else *idx = 0;
158   }
159   PetscFunctionReturn(0);
160 }
161 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   Intentionally a no-op: MatGetRow_MPIAdj hands out pointers directly into
   the matrix's own i/j storage, so there is nothing to copy back or free.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "MatEqual_MPIAdj"
172 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
173 {
174   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
175   PetscErrorCode ierr;
176   PetscTruth     flag;
177 
178   PetscFunctionBegin;
179   /* If the  matrix dimensions are not equal,or no of nonzeros */
180   if ((A->rmap.n != B->rmap.n) ||(a->nz != b->nz)) {
181     flag = PETSC_FALSE;
182   }
183 
184   /* if the a->i are the same */
185   ierr = PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
186 
187   /* if a->j are the same */
188   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
189 
190   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
191   PetscFunctionReturn(0);
192 }
193 
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
   Returns the internal compressed-row arrays (uniprocessor only; on more
   than one process *done is set PETSC_FALSE and nothing is returned).
   The arrays are NOT copied: when oshift is nonzero the stored data itself
   is shifted to 1-based indexing, and MatRestoreRowIJ_MPIAdj() must be
   called to shift it back.
*/
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap.n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: shift ja first, while (*ia)[*m] still holds the
       unshifted total nonzero count used as the loop bound */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
218 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
   Undoes the in-place 1-based shift applied by MatGetRowIJ_MPIAdj() when
   oshift was nonzero.  Verifies that the caller hands back the same
   internal arrays that were given out.
*/
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* order matters (mirror image of MatGetRowIJ_MPIAdj): unshift ia first
       so (*ia)[*m] is the true nonzero count bounding the ja loop */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
237 
238 /* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Only the operations meaningful for a
   structure-only adjacency matrix are provided; every 0 slot falls through
   to the generic Mat layer, which raises "not supported" errors.
   Slot positions are fixed by struct _MatOps — do not reorder.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
338 
339 EXTERN_C_BEGIN
340 #undef __FUNCT__
341 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
342 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
343 {
344   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
345   PetscErrorCode ierr;
346 #if defined(PETSC_USE_DEBUG)
347   PetscInt       ii;
348 #endif
349 
350   PetscFunctionBegin;
351   B->preallocated = PETSC_TRUE;
352 #if defined(PETSC_USE_DEBUG)
353   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
354   for (ii=1; ii<B->rmap.n; ii++) {
355     if (i[ii] < 0 || i[ii] < i[ii-1]) {
356       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
357     }
358   }
359   for (ii=0; ii<i[B->rmap.n]; ii++) {
360     if (j[ii] < 0 || j[ii] >= B->cmap.N) {
361       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
362     }
363   }
364 #endif
365 
366   b->j      = j;
367   b->i      = i;
368   b->values = values;
369 
370   b->nz               = i[B->rmap.n];
371   b->diag             = 0;
372   b->symmetric        = PETSC_FALSE;
373   b->freeaij          = PETSC_TRUE;
374 
375   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
376   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
377   PetscFunctionReturn(0);
378 }
379 EXTERN_C_END
380 
381 /*MC
382    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
383    intended for use constructing orderings and partitionings.
384 
385   Level: beginner
386 
.seealso: MatCreateMPIAdj()
388 M*/
389 
390 EXTERN_C_BEGIN
391 #undef __FUNCT__
392 #define __FUNCT__ "MatCreate_MPIAdj"
393 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
394 {
395   Mat_MPIAdj     *b;
396   PetscErrorCode ierr;
397   PetscMPIInt    size,rank;
398 
399   PetscFunctionBegin;
400   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
401   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
402 
403   ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
404   B->data             = (void*)b;
405   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
406   B->factor           = 0;
407   B->mapping          = 0;
408   B->assembled        = PETSC_FALSE;
409 
410   ierr = PetscMapInitialize(B->comm,&B->rmap);CHKERRQ(ierr);
411   if (B->cmap.n < 0) B->cmap.n = B->cmap.N;
412   if (B->cmap.N < 0) B->cmap.N = B->cmap.n;
413 
414   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
415                                     "MatMPIAdjSetPreallocation_MPIAdj",
416                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
417   PetscFunctionReturn(0);
418 }
419 EXTERN_C_END
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "MatMPIAdjSetPreallocation"
423 /*@C
424    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
425 
426    Collective on MPI_Comm
427 
428    Input Parameters:
429 +  A - the matrix
430 .  i - the indices into j for the start of each row
431 .  j - the column indices for each row (sorted for each row).
432        The indices in i and j start with zero (NOT with one).
433 -  values - [optional] edge weights
434 
435    Level: intermediate
436 
437 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
438 @*/
439 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
440 {
441   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
442 
443   PetscFunctionBegin;
444   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
445   if (f) {
446     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
447   }
448   PetscFunctionReturn(0);
449 }
450 
451 #undef __FUNCT__
452 #define __FUNCT__ "MatCreateMPIAdj"
453 /*@C
454    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
455    The matrix does not have numerical values associated with it, but is
456    intended for ordering (to reduce bandwidth etc) and partitioning.
457 
458    Collective on MPI_Comm
459 
460    Input Parameters:
461 +  comm - MPI communicator
462 .  m - number of local rows
463 .  n - number of columns
464 .  i - the indices into j for the start of each row
465 .  j - the column indices for each row (sorted for each row).
466        The indices in i and j start with zero (NOT with one).
467 -  values -[optional] edge weights
468 
469    Output Parameter:
470 .  A - the matrix
471 
472    Level: intermediate
473 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
479    Should not include the matrix diagonals.
480 
481    If you already have a matrix, you can create its adjacency matrix by a call
482    to MatConvert, specifying a type of MATMPIADJ.
483 
484    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
485 
486 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
487 @*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* convenience wrapper: create, size, set type, then hand the arrays over */
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  /* local column count equals the global column count n */
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,n);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* the matrix takes ownership of i, j and values (freed at MatDestroy) */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
499 
500 EXTERN_C_BEGIN
501 #undef __FUNCT__
502 #define __FUNCT__ "MatConvertTo_MPIAdj"
503 PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
504 {
505   Mat               B;
506   PetscErrorCode    ierr;
507   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
508   const PetscInt    *rj;
509   const PetscScalar *ra;
510   MPI_Comm          comm;
511 
512   PetscFunctionBegin;
513   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
514   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
515   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
516 
517   /* count the number of nonzeros per row */
518   for (i=0; i<m; i++) {
519     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
520     for (j=0; j<len; j++) {
521       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
522     }
523     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
524     nzeros += len;
525   }
526 
527   /* malloc space for nonzeros */
528   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
529   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
530   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
531 
532   nzeros = 0;
533   ia[0]  = 0;
534   for (i=0; i<m; i++) {
535     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
536     cnt     = 0;
537     for (j=0; j<len; j++) {
538       if (rj[j] != i+rstart) { /* if not diagonal */
539         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
540         ja[nzeros+cnt++] = rj[j];
541       }
542     }
543     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
544     nzeros += cnt;
545     ia[i+1] = nzeros;
546   }
547 
548   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
549   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
550   ierr = MatSetSizes(B,m,N,PETSC_DETERMINE,N);CHKERRQ(ierr);
551   ierr = MatSetType(B,type);CHKERRQ(ierr);
552   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
553 
554   if (reuse == MAT_REUSE_MATRIX) {
555     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
556   } else {
557     *newmat = B;
558   }
559   PetscFunctionReturn(0);
560 }
561 EXTERN_C_END
562 
563 
564 
565 
566 
567