xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 906ed7cc33fecbafab01746eec64dcdcc8a4842f)
1 #define PETSCMAT_DLL
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   PetscErrorCode    ierr;
15   PetscInt          i,j,m = A->rmap.n;
16   const char        *name;
17   PetscViewerFormat format;
18 
19   PetscFunctionBegin;
20   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
21   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
22   if (format == PETSC_VIEWER_ASCII_INFO) {
23     PetscFunctionReturn(0);
24   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
25     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
26   } else {
27     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
28     for (i=0; i<m; i++) {
29       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);CHKERRQ(ierr);
30       for (j=a->i[i]; j<a->i[i+1]; j++) {
31         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
34     }
35     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
36   }
37   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscTruth     iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54   }
55   PetscFunctionReturn(0);
56 }
57 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
/*
   Frees the MPIAdj private data.  The i/j/values CSR arrays are owned by the
   matrix only when a->freeaij was set at preallocation time; otherwise they
   belong to the caller and are left untouched.
*/
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    /* matrix owns the CSR arrays (see MatMPIAdjSetPreallocation_MPIAdj) */
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    ierr = PetscFree(a->values);CHKERRQ(ierr);
  }
  ierr = PetscFree(a);CHKERRQ(ierr);

  /* clear the composed preallocation routine so stale pointers cannot be queried */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "MatSetOption_MPIAdj"
83 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
84 {
85   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
86   PetscErrorCode ierr;
87 
88   PetscFunctionBegin;
89   switch (op) {
90   case MAT_SYMMETRIC:
91   case MAT_STRUCTURALLY_SYMMETRIC:
92   case MAT_HERMITIAN:
93     a->symmetric = PETSC_TRUE;
94     break;
95   case MAT_NOT_SYMMETRIC:
96   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
97   case MAT_NOT_HERMITIAN:
98     a->symmetric = PETSC_FALSE;
99     break;
100   case MAT_SYMMETRY_ETERNAL:
101   case MAT_NOT_SYMMETRY_ETERNAL:
102     break;
103   default:
104     ierr = PetscInfo(A,"Option ignored\n");CHKERRQ(ierr);
105     break;
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNCT__
116 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
117 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
120   PetscErrorCode ierr;
121   PetscInt       i,j,m = A->rmap.n;
122 
123   PetscFunctionBegin;
124   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
125   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
126   for (i=0; i<A->rmap.n; i++) {
127     for (j=a->i[i]; j<a->i[i+1]; j++) {
128       if (a->j[j] == i) {
129         a->diag[i] = j;
130         break;
131       }
132     }
133   }
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= A->rmap.rstart;
146 
147   if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/* No-op: MatGetRow_MPIAdj hands out pointers into the matrix's own storage,
   so there is nothing to free or copy back. */
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscTruth     flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->rmap.n != B->rmap.n) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
   Hands out the matrix's raw CSR arrays (uniprocessor only; on more than one
   process *done is set to PETSC_FALSE).  When oshift is nonzero the arrays are
   shifted IN PLACE to 1-based indexing; MatRestoreRowIJ_MPIAdj() must be
   called to shift them back.
*/
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap.n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first, while (*ia)[*m] still holds the 0-based nonzero count;
       only then shift the ia offsets */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
217 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
   Undoes the in-place 1-based shift applied by MatGetRowIJ_MPIAdj().  The ia
   offsets are restored FIRST so that (*ia)[*m] again equals the 0-based
   nonzero count used as the ja loop bound.
*/
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  /* guard against callers handing back arrays other than the matrix's own */
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
236 
237 /* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ; unsupported operations are left as 0.
   Populated slots: 1 MatGetRow, 2 MatRestoreRow, 16 MatEqual, 23 MatSetOption,
   51 MatGetRowIJ, 52 MatRestoreRowIJ, 61 MatDestroy, 62 MatView. */
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
337 
338 EXTERN_C_BEGIN
339 #undef __FUNCT__
340 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
341 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
342 {
343   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
344   PetscErrorCode ierr;
345 #if defined(PETSC_USE_DEBUG)
346   PetscInt       ii;
347 #endif
348 
349   PetscFunctionBegin;
350   B->preallocated = PETSC_TRUE;
351 #if defined(PETSC_USE_DEBUG)
352   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
353   for (ii=1; ii<B->rmap.n; ii++) {
354     if (i[ii] < 0 || i[ii] < i[ii-1]) {
355       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
356     }
357   }
358   for (ii=0; ii<i[B->rmap.n]; ii++) {
359     if (j[ii] < 0 || j[ii] >= B->cmap.N) {
360       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
361     }
362   }
363 #endif
364 
365   b->j      = j;
366   b->i      = i;
367   b->values = values;
368 
369   b->nz               = i[B->rmap.n];
370   b->diag             = 0;
371   b->symmetric        = PETSC_FALSE;
372   b->freeaij          = PETSC_TRUE;
373 
374   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
375   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
376   PetscFunctionReturn(0);
377 }
378 EXTERN_C_END
379 
380 /*MC
381    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
382    intended for use constructing orderings and partitionings.
383 
384   Level: beginner
385 
386 .seealso: MatCreateMPIAdj()
387 M*/
388 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
   Type constructor for MATMPIADJ: allocates the private data structure,
   installs the function table, and composes the preallocation routine.  The
   CSR arrays themselves are attached later via MatMPIAdjSetPreallocation().
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  /* NOTE(review): size and rank are fetched but never used below */
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = PetscMapInitialize(B->comm,&B->rmap);CHKERRQ(ierr);
  /* default the local/global column sizes to each other when unset */
  if (B->cmap.n < 0) B->cmap.n = B->cmap.N;
  if (B->cmap.N < 0) B->cmap.N = B->cmap.n;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
419 
420 #undef __FUNCT__
421 #define __FUNCT__ "MatMPIAdjSetPreallocation"
422 /*@C
423    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
424 
425    Collective on MPI_Comm
426 
427    Input Parameters:
428 +  A - the matrix
429 .  i - the indices into j for the start of each row
430 .  j - the column indices for each row (sorted for each row).
431        The indices in i and j start with zero (NOT with one).
432 -  values - [optional] edge weights
433 
434    Level: intermediate
435 
436 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
437 @*/
438 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
439 {
440   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
441 
442   PetscFunctionBegin;
443   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
444   if (f) {
445     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
446   }
447   PetscFunctionReturn(0);
448 }
449 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,PETSC_DETERMINE,n);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
498 
499 EXTERN_C_BEGIN
500 #undef __FUNCT__
501 #define __FUNCT__ "MatConvertTo_MPIAdj"
502 PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
503 {
504   Mat               B;
505   PetscErrorCode    ierr;
506   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
507   const PetscInt    *rj;
508   const PetscScalar *ra;
509   MPI_Comm          comm;
510 
511   PetscFunctionBegin;
512   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
513   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
514   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
515 
516   /* count the number of nonzeros per row */
517   for (i=0; i<m; i++) {
518     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
519     for (j=0; j<len; j++) {
520       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
521     }
522     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
523     nzeros += len;
524   }
525 
526   /* malloc space for nonzeros */
527   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
528   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
529   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
530 
531   nzeros = 0;
532   ia[0]  = 0;
533   for (i=0; i<m; i++) {
534     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
535     cnt     = 0;
536     for (j=0; j<len; j++) {
537       if (rj[j] != i+rstart) { /* if not diagonal */
538         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
539         ja[nzeros+cnt++] = rj[j];
540       }
541     }
542     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
543     nzeros += cnt;
544     ia[i+1] = nzeros;
545   }
546 
547   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
548   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
549   ierr = MatSetSizes(B,m,N,PETSC_DETERMINE,N);CHKERRQ(ierr);
550   ierr = MatSetType(B,type);CHKERRQ(ierr);
551   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
552 
553   if (reuse == MAT_REUSE_MATRIX) {
554     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
555   } else {
556     *newmat = B;
557   }
558   PetscFunctionReturn(0);
559 }
560 EXTERN_C_END
561 
562 
563 
564 
565 
566