xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision f3fe499b4cc4d64bf04aa4f5e4963dcc4eb56541)
1 #define PETSCMAT_DLL
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "../src/mat/impls/adj/mpi/mpiadj.h"
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "MatView_MPIAdj_ASCII"
10 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
11 {
12   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
13   PetscErrorCode    ierr;
14   PetscInt          i,j,m = A->rmap->n;
15   const char        *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   PetscErrorCode ierr;
45   PetscTruth     iascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
49   if (iascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
/*
   Frees all storage held by an MPIAdj matrix.  The i/j arrays are released
   only if the matrix owns them (freeaij); arrays that the caller obtained
   with plain malloc() rather than PetscMalloc() (freeaijwithfree) must be
   released with free().  NOTE(review): the free() branch does not release
   a->values -- presumably callers supplying malloc'd arrays never attach
   edge weights; confirm before relying on this.
*/
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* arrays came from malloc(), not PetscMalloc() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  /* clear type name and the composed preallocation method so the Mat can be re-typed */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
84 
85 #undef __FUNCT__
86 #define __FUNCT__ "MatSetOption_MPIAdj"
87 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
88 {
89   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
90   PetscErrorCode ierr;
91 
92   PetscFunctionBegin;
93   switch (op) {
94   case MAT_SYMMETRIC:
95   case MAT_STRUCTURALLY_SYMMETRIC:
96   case MAT_HERMITIAN:
97     a->symmetric = flg;
98     break;
99   case MAT_SYMMETRY_ETERNAL:
100     break;
101   default:
102     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
103     break;
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
115 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
118   PetscErrorCode ierr;
119   PetscInt       i,j,m = A->rmap->n;
120 
121   PetscFunctionBegin;
122   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
123   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
124   for (i=0; i<A->rmap->n; i++) {
125     for (j=a->i[i]; j<a->i[i+1]; j++) {
126       if (a->j[j] == i) {
127         a->diag[i] = j;
128         break;
129       }
130     }
131   }
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNCT__
136 #define __FUNCT__ "MatGetRow_MPIAdj"
137 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
138 {
139   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
140   PetscInt   *itmp;
141 
142   PetscFunctionBegin;
143   row -= A->rmap->rstart;
144 
145   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
146 
147   *nz = a->i[row+1] - a->i[row];
148   if (v) *v = PETSC_NULL;
149   if (idx) {
150     itmp = a->j + a->i[row];
151     if (*nz) {
152       *idx = itmp;
153     }
154     else *idx = 0;
155   }
156   PetscFunctionReturn(0);
157 }
158 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   No-op: MatGetRow_MPIAdj hands out pointers into the matrix's own
   storage, so there is nothing to release or copy back.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
166 
167 #undef __FUNCT__
168 #define __FUNCT__ "MatEqual_MPIAdj"
169 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
170 {
171   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
172   PetscErrorCode ierr;
173   PetscTruth     flag;
174 
175   PetscFunctionBegin;
176   /* If the  matrix dimensions are not equal,or no of nonzeros */
177   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
178     flag = PETSC_FALSE;
179   }
180 
181   /* if the a->i are the same */
182   ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
183 
184   /* if a->j are the same */
185   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
186 
187   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNCT__
192 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
193 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
194 {
195   PetscInt       i;
196   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
197 
198   PetscFunctionBegin;
199   *m    = A->rmap->n;
200   *ia   = a->i;
201   *ja   = a->j;
202   *done = PETSC_TRUE;
203   if (oshift) {
204     for (i=0; i<(*ia)[*m]; i++) {
205       (*ja)[i]++;
206     }
207     for (i=0; i<=(*m); i++) (*ia)[i]++;
208   }
209   PetscFunctionReturn(0);
210 }
211 
212 #undef __FUNCT__
213 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
214 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
215 {
216   PetscInt   i;
217   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
218 
219   PetscFunctionBegin;
220   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
221   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
222   if (oshift) {
223     for (i=0; i<=(*m); i++) (*ia)[i]--;
224     for (i=0; i<(*ia)[*m]; i++) {
225       (*ja)[i]--;
226     }
227   }
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatConvertFrom_MPIAdj"
233 PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
234 {
235   Mat               B;
236   PetscErrorCode    ierr;
237   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
238   const PetscInt    *rj;
239   const PetscScalar *ra;
240   MPI_Comm          comm;
241 
242   PetscFunctionBegin;
243   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
244   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
245   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
246 
247   /* count the number of nonzeros per row */
248   for (i=0; i<m; i++) {
249     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
250     for (j=0; j<len; j++) {
251       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
252     }
253     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
254     nzeros += len;
255   }
256 
257   /* malloc space for nonzeros */
258   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
259   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
260   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
261 
262   nzeros = 0;
263   ia[0]  = 0;
264   for (i=0; i<m; i++) {
265     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
266     cnt     = 0;
267     for (j=0; j<len; j++) {
268       if (rj[j] != i+rstart) { /* if not diagonal */
269         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
270         ja[nzeros+cnt++] = rj[j];
271       }
272     }
273     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
274     nzeros += cnt;
275     ia[i+1] = nzeros;
276   }
277 
278   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
279   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
280   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
281   ierr = MatSetType(B,type);CHKERRQ(ierr);
282   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
283 
284   if (reuse == MAT_REUSE_MATRIX) {
285     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
286   } else {
287     *newmat = B;
288   }
289   PetscFunctionReturn(0);
290 }
291 
/* -------------------------------------------------------------------*/
/*
   Function table for MATMPIADJ.  Only the handful of operations needed
   for ordering/partitioning are implemented (get/restore row, equality,
   set-option, row IJ access, destroy, view, convert-from); every other
   slot is 0 and will produce a "not supported" error if invoked.  The
   slot-number comments track the indices in struct _MatOps.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};
391 
392 EXTERN_C_BEGIN
393 #undef __FUNCT__
394 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
395 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
396 {
397   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
398   PetscErrorCode ierr;
399 #if defined(PETSC_USE_DEBUG)
400   PetscInt       ii;
401 #endif
402 
403   PetscFunctionBegin;
404   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
405   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
406   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
407   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
408 
409 #if defined(PETSC_USE_DEBUG)
410   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
411   for (ii=1; ii<B->rmap->n; ii++) {
412     if (i[ii] < 0 || i[ii] < i[ii-1]) {
413       SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
414     }
415   }
416   for (ii=0; ii<i[B->rmap->n]; ii++) {
417     if (j[ii] < 0 || j[ii] >= B->cmap->N) {
418       SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
419     }
420   }
421 #endif
422   B->preallocated = PETSC_TRUE;
423 
424   b->j      = j;
425   b->i      = i;
426   b->values = values;
427 
428   b->nz               = i[B->rmap->n];
429   b->diag             = 0;
430   b->symmetric        = PETSC_FALSE;
431   b->freeaij          = PETSC_TRUE;
432 
433   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
434   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
435   PetscFunctionReturn(0);
436 }
437 EXTERN_C_END
438 
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
447 
448 EXTERN_C_BEGIN
449 #undef __FUNCT__
450 #define __FUNCT__ "MatCreate_MPIAdj"
451 PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
452 {
453   Mat_MPIAdj     *b;
454   PetscErrorCode ierr;
455 
456   PetscFunctionBegin;
457   ierr                = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
458   B->data             = (void*)b;
459   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
460   B->mapping          = 0;
461   B->assembled        = PETSC_FALSE;
462 
463   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
464                                     "MatMPIAdjSetPreallocation_MPIAdj",
465                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
466   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
467   PetscFunctionReturn(0);
468 }
469 EXTERN_C_END
470 
471 #undef __FUNCT__
472 #define __FUNCT__ "MatMPIAdjSetPreallocation"
473 /*@C
474    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
475 
476    Logically Collective on MPI_Comm
477 
478    Input Parameters:
479 +  A - the matrix
480 .  i - the indices into j for the start of each row
481 .  j - the column indices for each row (sorted for each row).
482        The indices in i and j start with zero (NOT with one).
483 -  values - [optional] edge weights
484 
485    Level: intermediate
486 
487 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
488 @*/
489 PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
490 {
491   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
492 
493   PetscFunctionBegin;
494   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
495   if (f) {
496     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
497   }
498   PetscFunctionReturn(0);
499 }
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "MatCreateMPIAdj"
503 /*@C
504    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
505    The matrix does not have numerical values associated with it, but is
506    intended for ordering (to reduce bandwidth etc) and partitioning.
507 
508    Collective on MPI_Comm
509 
510    Input Parameters:
511 +  comm - MPI communicator
512 .  m - number of local rows
513 .  N - number of global columns
514 .  i - the indices into j for the start of each row
515 .  j - the column indices for each row (sorted for each row).
516        The indices in i and j start with zero (NOT with one).
517 -  values -[optional] edge weights
518 
519    Output Parameter:
520 .  A - the matrix
521 
522    Level: intermediate
523 
524    Notes: This matrix object does not support most matrix operations, include
525    MatSetValues().
526    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
527    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
528     call from Fortran you need not create the arrays with PetscMalloc().
529    Should not include the matrix diagonals.
530 
531    If you already have a matrix, you can create its adjacency matrix by a call
532    to MatConvert, specifying a type of MATMPIADJ.
533 
534    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
535 
536 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
537 @*/
538 PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
539 {
540   PetscErrorCode ierr;
541 
542   PetscFunctionBegin;
543   ierr = MatCreate(comm,A);CHKERRQ(ierr);
544   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
545   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
546   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
547   PetscFunctionReturn(0);
548 }
549 
550 
551 
552 
553 
554 
555 
556