xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 4db511f1d1b98f37717f3ecaf35cce8ecdb684f9)
1 /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Frees all storage associated with an MPIAdj matrix.

  The i/j/values arrays are freed only when this matrix owns them
  (a->freeaij is set), i.e. ownership was transferred at creation time.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* diag is only allocated by MatMarkDiagonal_MPIAdj(), so may be null */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    /* edge weights are optional */
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case MAT_STRUCTURALLY_SYMMETRIC:
88     a->symmetric = PETSC_TRUE;
89     break;
90   default:
91     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
92     break;
93   }
94   PetscFunctionReturn(0);
95 }
96 
97 
98 /*
99      Adds diagonal pointers to sparse matrix structure.
100 */
101 
102 #undef __FUNCT__
103 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
104 int MatMarkDiagonal_MPIAdj(Mat A)
105 {
106   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
107   int        i,j,*diag,m = A->m,ierr;
108 
109   PetscFunctionBegin;
110   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
111   PetscLogObjectMemory(A,(m+1)*sizeof(int));
112   for (i=0; i<A->m; i++) {
113     for (j=a->i[i]; j<a->i[i+1]; j++) {
114       if (a->j[j] == i) {
115         diag[i] = j;
116         break;
117       }
118     }
119   }
120   a->diag = diag;
121   PetscFunctionReturn(0);
122 }
123 
124 #undef __FUNCT__
125 #define __FUNCT__ "MatGetRow_MPIAdj"
126 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
127 {
128   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
129   int        *itmp;
130 
131   PetscFunctionBegin;
132   row -= a->rstart;
133 
134   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
135 
136   *nz = a->i[row+1] - a->i[row];
137   if (v) *v = PETSC_NULL;
138   if (idx) {
139     itmp = a->j + a->i[row];
140     if (*nz) {
141       *idx = itmp;
142     }
143     else *idx = 0;
144   }
145   PetscFunctionReturn(0);
146 }
147 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - No-op companion to MatGetRow_MPIAdj(): the row
  pointers handed out point directly into the matrix storage, so there is
  nothing to copy back or free.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
155 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/* MatGetBlockSize_MPIAdj - Adjacency matrices always have block size 1. */
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
164 
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "MatEqual_MPIAdj"
168 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
169 {
170   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
171   int         ierr;
172   PetscTruth  flag;
173 
174   PetscFunctionBegin;
175   /* If the  matrix dimensions are not equal,or no of nonzeros */
176   if ((A->m != B->m) ||(a->nz != b->nz)) {
177     flag = PETSC_FALSE;
178   }
179 
180   /* if the a->i are the same */
181   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
182 
183   /* if a->j are the same */
184   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
185 
186   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
    MatGetRowIJ_MPIAdj - Hands out the internal i/j arrays without copying.
  Only supported on a single process; otherwise *done is set PETSC_FALSE.
  When oshift is nonzero the arrays are shifted IN PLACE to 1-based
  indexing and must be shifted back via MatRestoreRowIJ_MPIAdj().
*/
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: (*ia)[*m] still holds the unshifted nonzero count */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
212 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
    MatRestoreRowIJ_MPIAdj - Undoes MatGetRowIJ_MPIAdj(): verifies the
  caller handed back the same internal arrays and, if they were shifted
  to 1-based indexing, shifts them back to 0-based.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* unshift ia first so (*ia)[*m] is again the 0-based nonzero count */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
231 
232 /* -------------------------------------------------------------------*/
/*
    Function (dispatch) table for the MPIAdj matrix type.  Entries are
  positional; the 0 slots are operations MPIAdj does not support (it is
  a structure-only matrix with no numerical values).
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};
297 
298 EXTERN_C_BEGIN
299 #undef __FUNCT__
300 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
301 int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
302 {
303   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
304   int        ierr;
305 #if defined(PETSC_USE_BOPT_g)
306   int        ii;
307 #endif
308 
309   PetscFunctionBegin;
310   B->preallocated = PETSC_TRUE;
311 #if defined(PETSC_USE_BOPT_g)
312   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
313   for (ii=1; ii<B->m; ii++) {
314     if (i[ii] < 0 || i[ii] < i[ii-1]) {
315       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
316     }
317   }
318   for (ii=0; ii<i[B->m]; ii++) {
319     if (j[ii] < 0 || j[ii] >= B->N) {
320       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
321     }
322   }
323 #endif
324 
325   b->j      = j;
326   b->i      = i;
327   b->values = values;
328 
329   b->nz               = i[B->m];
330   b->diag             = 0;
331   b->symmetric        = PETSC_FALSE;
332   b->freeaij          = PETSC_TRUE;
333 
334   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
335   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
336   PetscFunctionReturn(0);
337 }
338 EXTERN_C_END
339 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
    MatCreate_MPIAdj - Type constructor for MATMPIADJ: allocates the
  Mat_MPIAdj header, installs the function table, and computes the
  row-ownership ranges across the communicator.  Collective on B->comm.
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p] = first global row owned by process p (prefix sum of local m's) */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  /* register the type-specific preallocation routine for dynamic dispatch */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
385 
386 #undef __FUNCT__
387 #define __FUNCT__ "MatMPIAdjSetPreallocation"
388 /*@C
389    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
390 
391    Collective on MPI_Comm
392 
393    Input Parameters:
394 +  A - the matrix
395 .  i - the indices into j for the start of each row
396 .  j - the column indices for each row (sorted for each row).
397        The indices in i and j start with zero (NOT with one).
398 -  values - [optional] edge weights
399 
400    Level: intermediate
401 
402 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
403 @*/
404 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
405 {
406   int ierr,(*f)(Mat,int*,int*,int*);
407 
408   PetscFunctionBegin;
409   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
410   if (f) {
411     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
412   }
413   PetscFunctionReturn(0);
414 }
415 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* m local rows, global row count determined; n columns on every process */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* hands ownership of i/j/values over to the matrix */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
463 
464 EXTERN_C_BEGIN
465 #undef __FUNCT__
466 #define __FUNCT__ "MatConvertTo_MPIAdj"
467 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
468 {
469   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
470   PetscScalar  *ra;
471   MPI_Comm     comm;
472 
473   PetscFunctionBegin;
474   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
475   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
476   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
477 
478   /* count the number of nonzeros per row */
479   for (i=0; i<m; i++) {
480     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
481     for (j=0; j<len; j++) {
482       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
483     }
484     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
485     nzeros += len;
486   }
487 
488   /* malloc space for nonzeros */
489   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
490   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
491   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
492 
493   nzeros = 0;
494   ia[0]  = 0;
495   for (i=0; i<m; i++) {
496     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
497     cnt     = 0;
498     for (j=0; j<len; j++) {
499       if (rj[j] != i+rstart) { /* if not diagonal */
500         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
501         ja[nzeros+cnt++] = rj[j];
502       }
503     }
504     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
505     nzeros += cnt;
506     ia[i+1] = nzeros;
507   }
508 
509   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
510   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
511 
512   PetscFunctionReturn(0);
513 }
514 EXTERN_C_END
515 
516 
517 
518 
519 
520