xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision bdd0f598524a006f7c18f69600ab4e3ca0245f0e)
1 /*$Id: mpiadj.c,v 1.43 2000/07/11 02:55:19 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = a->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else {
23     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
24     for (i=0; i<m; i++) {
25       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
26       for (j=a->i[i]; j<a->i[i+1]; j++) {
27         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
28       }
29       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
30     }
31     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
32   }
33   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNC__
38 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
39 int MatView_MPIAdj(Mat A,Viewer viewer)
40 {
41   int        ierr;
42   PetscTruth isascii;
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
48   } else {
49     SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
/*
    Frees all storage owned by an MPIAdj matrix: the local-to-global
    mappings, the row/column maps, the adjacency arrays (only when the
    matrix owns them, i.e. freeaij is set), and the matrix header itself.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;

  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  /* i/j/values were handed over at creation time (see MatCreateMPIAdj),
     so they are freed here rather than by the caller */
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
92 
93 #undef __FUNC__
94 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
98 
99   PetscFunctionBegin;
100   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
101     a->symmetric = PETSC_TRUE;
102   } else {
103     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNC__
114 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
115 int MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
118   int        i,j,*diag,m = a->m;
119 
120   PetscFunctionBegin;
121   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
122   PLogObjectMemory(A,(m+1)*sizeof(int));
123   for (i=0; i<a->m; i++) {
124     for (j=a->i[i]; j<a->i[i+1]; j++) {
125       if (a->j[j] == i) {
126         diag[i] = j;
127         break;
128       }
129     }
130   }
131   a->diag = diag;
132   PetscFunctionReturn(0);
133 }
134 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
/*
    Returns the global dimensions of the matrix; either output
    pointer may be PETSC_NULL if that value is not wanted.
*/
int MatGetSize_MPIAdj(Mat A,int *m,int *n)
{
  PetscFunctionBegin;
  if (m) *m = A->M;
  if (n) *n = A->N;
  PetscFunctionReturn(0);
}
144 
145 #undef __FUNC__
146 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
147 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
150   PetscFunctionBegin;
151   if (m) *m = a->m;
152   if (n) *n = A->N;
153   PetscFunctionReturn(0);
154 }
155 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
/*
    Returns [rstart, rend): the half-open range of global row numbers
    owned by this processor (computed in MatCreateMPIAdj from the
    allgathered local row counts).
*/
int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscFunctionBegin;
  if (m) *m = a->rstart;
  if (n) *n = a->rend;
  PetscFunctionReturn(0);
}
166 
167 #undef __FUNC__
168 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
169 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
170 {
171   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
172   int        *itmp;
173 
174   PetscFunctionBegin;
175   row -= a->rstart;
176 
177   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
178 
179   *nz = a->i[row+1] - a->i[row];
180   if (v) *v = PETSC_NULL;
181   if (idx) {
182     itmp = a->j + a->i[row];
183     if (*nz) {
184       *idx = itmp;
185     }
186     else *idx = 0;
187   }
188   PetscFunctionReturn(0);
189 }
190 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
    Intentionally a no-op: MatGetRow_MPIAdj returns pointers directly
    into the matrix storage, so there is nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
198 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
    Adjacency matrices are always treated as having block size 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
207 
208 
209 #undef __FUNC__
210 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
211 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
212 {
213   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
214   int         ierr;
215   PetscTruth  flag;
216 
217   PetscFunctionBegin;
218   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
219 
220   /* If the  matrix dimensions are not equal,or no of nonzeros */
221   if ((a->m != b->m) ||(a->nz != b->nz)) {
222     flag = PETSC_FALSE;
223   }
224 
225   /* if the a->i are the same */
226   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
227 
228   /* if a->j are the same */
229   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
230 
231   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
235 
236 /* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type. The entries are POSITIONAL
    (they must line up with the slots of struct _MatOps); a zero entry
    means the corresponding operation is not supported by this type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
303 
304 
305 #undef __FUNC__
306 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
307 /*@C
308    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
309    The matrix does not have numerical values associated with it, but is
310    intended for ordering (to reduce bandwidth etc) and partitioning.
311 
312    Collective on MPI_Comm
313 
314    Input Parameters:
315 +  comm - MPI communicator
316 .  m - number of local rows
317 .  n - number of columns
318 .  i - the indices into j for the start of each row
319 .  j - the column indices for each row (sorted for each row).
320        The indices in i and j start with zero (NOT with one).
321 -  values -[optional] edge weights
322 
323    Output Parameter:
324 .  A - the matrix
325 
326    Level: intermediate
327 
328    Notes: This matrix object does not support most matrix operations, include
329    MatSetValues().
330    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
331    when the matrix is destroyed. And you must allocate them with PetscMalloc(). If you
332     call from Fortran you need not create the arrays with PetscMalloc().
333 
334    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
335 
336 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
337 @*/
338 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
339 {
340   Mat        B;
341   Mat_MPIAdj *b;
342   int        ii,ierr,size,rank;
343   PetscTruth flg;
344 
345   PetscFunctionBegin;
346   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
347   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
348 
349   *A                  = 0;
350   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
351   PLogObjectCreate(B);
352   B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
353   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
354   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
355   B->factor           = 0;
356   B->lupivotthreshold = 1.0;
357   B->mapping          = 0;
358   B->assembled        = PETSC_FALSE;
359 
360   b->m = m; B->m = m;
361   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
362   B->n = n; B->N = n;
363 
364   /* the information in the maps duplicates the information computed below, eventually
365      we should remove the duplicate information that is not contained in the maps */
366   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
367   /* we don't know the "local columns" so just use the row information :-(*/
368   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
369 
370   b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
371   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
372   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
373   b->rowners[0] = 0;
374   for (ii=2; ii<=size; ii++) {
375     b->rowners[ii] += b->rowners[ii-1];
376   }
377   b->rstart = b->rowners[rank];
378   b->rend   = b->rowners[rank+1];
379 
380 #if defined(PETSC_BOPT_g)
381   if (i[0] != 0) SETERR1(1,1,"First i[] index must be zero, instead it is %d\n",i[0]);
382   for (ii=1; ii<m; ii++) {
383     if (i[ii] < 0 || i[ii] > i[ii-1]) {
384       SETERRQ4(1,1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
385     }
386   }
387   for (ii=0; ii<i[m]; ii++) {
388     if (i[ii] < 0 || i[ii] >= N) {
389       SETERRQ2(1,1,"Column index %d out of range %d\n",ii,i[ii]);
390     }
391   }
392 #endif
393 
394   b->j  = j;
395   b->i  = i;
396   b->values = values;
397 
398   b->nz               = i[m];
399   b->diag             = 0;
400   b->symmetric        = PETSC_FALSE;
401   b->freeaij          = PETSC_TRUE;
402   *A = B;
403 
404   ierr = OptionsHasName(PETSC_NULL,"-help",&flg);CHKERRQ(ierr);
405   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
406   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
407   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
408   PetscFunctionReturn(0);
409 }
410 
411 
412 
413 #undef __FUNC__
414 #define __FUNC__ /*<a name=""></a>*/"MatConvert_MPIAdj"
415 int MatConvert_MPIAdj(Mat A,MatType type,Mat *B)
416 {
417   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
418   Scalar   *ra;
419   MPI_Comm comm;
420 
421   PetscFunctionBegin;
422   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
423   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
424   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
425 
426   /* count the number of nonzeros per row */
427   for (i=0; i<m; i++) {
428     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
429     for (j=0; j<len; j++) {
430       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
431     }
432     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
433     nzeros += len;
434   }
435 
436   /* malloc space for nonzeros */
437   a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
438   ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
439   ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);
440 
441   nzeros = 0;
442   ia[0]  = 0;
443   for (i=0; i<m; i++) {
444     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
445     cnt     = 0;
446     for (j=0; j<len; j++) {
447       if (rj[j] != i+rstart) { /* if not diagonal */
448         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
449         ja[nzeros+cnt++] = rj[j];
450       }
451     }
452     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
453     nzeros += cnt;
454     ia[i+1] = nzeros;
455   }
456 
457   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
458   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
459 
460   PetscFunctionReturn(0);
461 }
462 
463 
464 
465 
466 
467 
468