xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 95d5f7c29374efcfd2ca44c2fe93981fbc2b4454)
1 /*$Id: mpiadj.c,v 1.38 2000/04/09 04:36:28 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "sys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = a->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else {
23     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
24     for (i=0; i<m; i++) {
25       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
26       for (j=a->i[i]; j<a->i[i+1]; j++) {
27         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
28       }
29       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
30     }
31     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
32   }
33   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNC__
38 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
39 int MatView_MPIAdj(Mat A,Viewer viewer)
40 {
41   int        ierr;
42   PetscTruth isascii;
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
48   } else {
49     SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
/*
    Frees all storage owned by an MPIAdj matrix.  The i, j and values
    arrays were supplied by the user at creation time, but ownership passed
    to the matrix (see MatCreateMPIAdj()), so they are freed here as well.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;

  /* release optional index mappings and row/column maps held by the
     generic Mat header */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* diag and values are optional and may never have been allocated */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  /* finally tear down the object header itself */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
90 
91 
92 #undef __FUNC__
93 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
94 int MatSetOption_MPIAdj(Mat A,MatOption op)
95 {
96   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
97 
98   PetscFunctionBegin;
99   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
100     a->symmetric = PETSC_TRUE;
101   } else {
102     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNC__
113 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
114 int MatMarkDiagonal_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
117   int        i,j,*diag,m = a->m;
118 
119   PetscFunctionBegin;
120   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
121   PLogObjectMemory(A,(m+1)*sizeof(int));
122   for (i=0; i<a->m; i++) {
123     for (j=a->i[i]; j<a->i[i+1]; j++) {
124       if (a->j[j] == i) {
125         diag[i] = j;
126         break;
127       }
128     }
129   }
130   a->diag = diag;
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNC__
135 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
136 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
137 {
138   PetscFunctionBegin;
139   if (m) *m = A->M;
140   if (n) *n = A->N;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNC__
145 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
146 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
147 {
148   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
149   PetscFunctionBegin;
150   if (m) *m = a->m;
151   if (n) *n = A->N;
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNC__
156 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
157 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
158 {
159   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
160   PetscFunctionBegin;
161   if (m) *m = a->rstart;
162   if (n) *n = a->rend;
163   PetscFunctionReturn(0);
164 }
165 
166 #undef __FUNC__
167 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
168 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
169 {
170   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
171   int        *itmp;
172 
173   PetscFunctionBegin;
174   row -= a->rstart;
175 
176   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
177 
178   *nz = a->i[row+1] - a->i[row];
179   if (v) *v = PETSC_NULL;
180   if (idx) {
181     itmp = a->j + a->i[row];
182     if (*nz) {
183       *idx = itmp;
184     }
185     else *idx = 0;
186   }
187   PetscFunctionReturn(0);
188 }
189 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
    MatGetRow_MPIAdj() returns pointers directly into the stored i/j
    arrays without allocating, so there is nothing to release here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
197 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
    Adjacency matrices are always treated as having block size one.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
206 
207 
208 #undef __FUNC__
209 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
210 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
211 {
212   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
213   int         ierr;
214   PetscTruth  flag;
215 
216   PetscFunctionBegin;
217   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
218 
219   /* If the  matrix dimensions are not equal,or no of nonzeros */
220   if ((a->m != b->m) ||(a->nz != b->nz)) {
221     flag = PETSC_FALSE;
222   }
223 
224   /* if the a->i are the same */
225   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
226 
227   /* if a->j are the same */
228   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
229 
230   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
231   PetscFunctionReturn(0);
232 }
233 
234 
/* -------------------------------------------------------------------*/
/*
    Operation table for the MATMPIADJ type.  Entries are positional and
    must match the slot order of struct _MatOps; a zero entry means the
    operation is not implemented for this matrix type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
302 
303 
304 #undef __FUNC__
305 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
306 /*@C
307    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
308    The matrix does not have numerical values associated with it, but is
309    intended for ordering (to reduce bandwidth etc) and partitioning.
310 
311    Collective on MPI_Comm
312 
313    Input Parameters:
314 +  comm - MPI communicator
315 .  m - number of local rows
316 .  n - number of columns
317 .  i - the indices into j for the start of each row
318 .  j - the column indices for each row (sorted for each row).
319        The indices in i and j start with zero (NOT with one).
320 -  values -[optional] edge weights
321 
322    Output Parameter:
323 .  A - the matrix
324 
325    Level: intermediate
326 
327    Notes: This matrix object does not support most matrix operations, include
328    MatSetValues().
329    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
330    when the matrix is destroyed. And you must allocate them with PetscMalloc()
331 
332    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
333 
334 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
335 @*/
336 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
337 {
338   Mat        B;
339   Mat_MPIAdj *b;
340   int        ii,ierr,size,rank;
341   PetscTruth flg;
342 
343   PetscFunctionBegin;
344   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
345   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
346 
347   *A                  = 0;
348   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
349   PLogObjectCreate(B);
350   B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
351   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
352   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
353   B->ops->destroy     = MatDestroy_MPIAdj;
354   B->ops->view        = MatView_MPIAdj;
355   B->factor           = 0;
356   B->lupivotthreshold = 1.0;
357   B->mapping          = 0;
358   B->assembled        = PETSC_FALSE;
359 
360   b->m = m; B->m = m;
361   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
362   B->n = n; B->N = n;
363 
364   /* the information in the maps duplicates the information computed below, eventually
365      we should remove the duplicate information that is not contained in the maps */
366   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
367   /* we don't know the "local columns" so just use the row information :-(*/
368   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
369 
370   b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
371   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
372   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
373   b->rowners[0] = 0;
374   for (ii=2; ii<=size; ii++) {
375     b->rowners[ii] += b->rowners[ii-1];
376   }
377   b->rstart = b->rowners[rank];
378   b->rend   = b->rowners[rank+1];
379 
380 #if defined(PETSC_BOPT_g)
381   if (i[0] != 0) SETERR1(1,1,"First i[] index must be zero, instead it is %d\n",i[0]);
382   for (ii=1; ii<m; ii++) {
383     if (i[ii] < 0 || i[ii] > i[ii-1]) {
384       SETERRQ4(1,1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
385     }
386   }
387   for (ii=0; ii<i[m]; ii++) {
388     if (i[ii] < 0 || i[ii] >= N) {
389       SETERRQ2(1,1,"Column index %d out of range %d\n",ii,i[ii]);
390     }
391   }
392 #endif
393 
394   b->j  = j;
395   b->i  = i;
396   b->values = values;
397 
398   b->nz               = i[m];
399   b->diag             = 0;
400   b->symmetric        = PETSC_FALSE;
401 
402   *A = B;
403 
404   ierr = OptionsHasName(PETSC_NULL,"-help",&flg);CHKERRQ(ierr);
405   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
406   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
407   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
408   PetscFunctionReturn(0);
409 }
410 
411 
412 
413 #undef __FUNC__
414 #define __FUNC__ /*<a name=""></a>*/"MatConvert_MPIAdj"
415 int MatConvert_MPIAdj(Mat A,MatType type,Mat *B)
416 {
417   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
418   Scalar   *ra;
419   MPI_Comm comm;
420 
421   PetscFunctionBegin;
422   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
423   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
424   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
425 
426   /* count the number of nonzeros per row */
427   for (i=0; i<m; i++) {
428     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
429     for (j=0; j<len; j++) {
430       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
431     }
432     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
433     nzeros += len;
434   }
435 
436   /* malloc space for nonzeros */
437   a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
438   ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
439   ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);
440 
441   nzeros = 0;
442   ia[0]  = 0;
443   for (i=0; i<m; i++) {
444     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
445     cnt     = 0;
446     for (j=0; j<len; j++) {
447       if (rj[j] != i+rstart) { /* if not diagonal */
448         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
449         ja[nzeros+cnt++] = rj[j];
450       }
451     }
452     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
453     nzeros += cnt;
454     ia[i+1] = nzeros;
455   }
456 
457   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
458   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
459 
460   PetscFunctionReturn(0);
461 }
462 
463 
464 
465 
466 
467 
468