xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 262ff7bbb8d19d69e3928431622c455081fa134e)
1 /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
/*
    Frees all storage owned by an MPIAdj matrix.  The i/j/values arrays are
  freed only when the matrix owns them (a->freeaij), i.e. they were handed
  over via MatMPIAdjSetPreallocation_MPIAdj().
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* a->diag is allocated lazily, only by MatMarkDiagonal_MPIAdj() */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    /* values is optional; may never have been provided */
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case MAT_SYMMETRIC:
88   case MAT_STRUCTURALLY_SYMMETRIC:
89   case MAT_HERMITIAN:
90     a->symmetric = PETSC_TRUE;
91     break;
92   case MAT_NOT_SYMMETRIC:
93   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
94   case MAT_NOT_HERMITIAN:
95     a->symmetric = PETSC_FALSE;
96     break;
97   case MAT_SYMMETRY_ETERNAL:
98   case MAT_NOT_SYMMETRY_ETERNAL:
99     break;
100   default:
101     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
102     break;
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNCT__
113 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
114 int MatMarkDiagonal_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
117   int        i,j,*diag,m = A->m,ierr;
118 
119   PetscFunctionBegin;
120   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
121   PetscLogObjectMemory(A,(m+1)*sizeof(int));
122   for (i=0; i<A->m; i++) {
123     for (j=a->i[i]; j<a->i[i+1]; j++) {
124       if (a->j[j] == i) {
125         diag[i] = j;
126         break;
127       }
128     }
129   }
130   a->diag = diag;
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNCT__
135 #define __FUNCT__ "MatGetRow_MPIAdj"
136 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
137 {
138   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
139   int        *itmp;
140 
141   PetscFunctionBegin;
142   row -= a->rstart;
143 
144   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
145 
146   *nz = a->i[row+1] - a->i[row];
147   if (v) *v = PETSC_NULL;
148   if (idx) {
149     itmp = a->j + a->i[row];
150     if (*nz) {
151       *idx = itmp;
152     }
153     else *idx = 0;
154   }
155   PetscFunctionReturn(0);
156 }
157 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
    No-op: MatGetRow_MPIAdj() returns pointers directly into the matrix
  storage, so there is nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
165 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/* Adjacency matrices are always treated as having point (size 1) blocks. */
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
174 
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatEqual_MPIAdj"
178 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
179 {
180   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
181   int         ierr;
182   PetscTruth  flag;
183 
184   PetscFunctionBegin;
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
    Returns pointers to the internal compressed-row arrays; supported on
  one process only (sets *done = PETSC_FALSE otherwise).  A nonzero
  oshift shifts the arrays IN PLACE, so the caller must return them via
  MatRestoreRowIJ_MPIAdj() to undo the shift.
*/
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: ja's length (*ia)[*m] must be read BEFORE ia is shifted */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
222 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
    Undoes the in-place index shift applied by MatGetRowIJ_MPIAdj().
  The ia/ja pointers must be exactly the ones previously handed out.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* order matters (mirror of MatGetRowIJ): ia must be un-shifted FIRST so
       that (*ia)[*m] is again the true length of ja for the second loop */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
241 
/* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  Entries are POSITIONAL
  (slot numbers are noted in the comments below); a 0 entry means the
  operation is not supported by this type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0
};
330 
331 EXTERN_C_BEGIN
332 #undef __FUNCT__
333 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
334 int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
335 {
336   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
337   int        ierr;
338 #if defined(PETSC_USE_BOPT_g)
339   int        ii;
340 #endif
341 
342   PetscFunctionBegin;
343   B->preallocated = PETSC_TRUE;
344 #if defined(PETSC_USE_BOPT_g)
345   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
346   for (ii=1; ii<B->m; ii++) {
347     if (i[ii] < 0 || i[ii] < i[ii-1]) {
348       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
349     }
350   }
351   for (ii=0; ii<i[B->m]; ii++) {
352     if (j[ii] < 0 || j[ii] >= B->N) {
353       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
354     }
355   }
356 #endif
357 
358   b->j      = j;
359   b->i      = i;
360   b->values = values;
361 
362   b->nz               = i[B->m];
363   b->diag             = 0;
364   b->symmetric        = PETSC_FALSE;
365   b->freeaij          = PETSC_TRUE;
366 
367   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
368   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
369   PetscFunctionReturn(0);
370 }
371 EXTERN_C_END
372 
373 /*MC
374    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.
376 
377   Level: beginner
378 
379 .seealso: MatCreateMPIAdj
380 M*/
381 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
    Type constructor: installs the MPIAdj function table and computes the
  parallel row layout.  The i/j data itself is supplied afterwards via
  MatMPIAdjSetPreallocation().
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* the global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* gather every process's local row count, then turn rowners[] into a
     prefix sum so rowners[rank] is this process's first global row
     (starting the loop at ii=2 is fine: rowners[1] += rowners[0] would add 0) */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  /* register the type-specific preallocation routine for dynamic dispatch */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
427 
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  int ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  /* dispatch to the type-specific routine registered by MatCreate_MPIAdj();
     silently a no-op if the matrix type does not provide one */
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
457 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* create, set the type, then hand over the index arrays */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
505 
506 EXTERN_C_BEGIN
507 #undef __FUNCT__
508 #define __FUNCT__ "MatConvertTo_MPIAdj"
509 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat) {
510   Mat          B;
511   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
512   PetscScalar  *ra;
513   MPI_Comm     comm;
514 
515   PetscFunctionBegin;
516   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
517   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
518   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
519 
520   /* count the number of nonzeros per row */
521   for (i=0; i<m; i++) {
522     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
523     for (j=0; j<len; j++) {
524       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
525     }
526     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
527     nzeros += len;
528   }
529 
530   /* malloc space for nonzeros */
531   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
532   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
533   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
534 
535   nzeros = 0;
536   ia[0]  = 0;
537   for (i=0; i<m; i++) {
538     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
539     cnt     = 0;
540     for (j=0; j<len; j++) {
541       if (rj[j] != i+rstart) { /* if not diagonal */
542         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
543         ja[nzeros+cnt++] = rj[j];
544       }
545     }
546     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
547     nzeros += cnt;
548     ia[i+1] = nzeros;
549   }
550 
551   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
552   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,&B);CHKERRQ(ierr);
553 
554   /* Fake support for "inplace" convert. */
555   if (*newmat == A) {
556     ierr = MatDestroy(A);CHKERRQ(ierr);
557   }
558   *newmat = B;
559   PetscFunctionReturn(0);
560 }
561 EXTERN_C_END
562 
563 
564 
565 
566 
567