xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 29bbc08cd5461c366aba6645924e8ff42acd1de0)
1 /*$Id: mpiadj.c,v 1.47 2000/09/22 20:44:09 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = a->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else {
23     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
24     for (i=0; i<m; i++) {
25       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
26       for (j=a->i[i]; j<a->i[i+1]; j++) {
27         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
28       }
29       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
30     }
31     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
32   }
33   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNC__
38 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
39 int MatView_MPIAdj(Mat A,Viewer viewer)
40 {
41   int        ierr;
42   PetscTruth isascii;
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
48   } else {
49     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Releases all storage associated with an MPIAdj matrix:
    the local-to-global mappings, the row/column maps, the diagonal marker
    array, the adjacency arrays (only when the matrix owns them), the row
    ownership table, the private data structure, and finally the Mat header.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;

  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  /* log final sizes before the private data is freed below */
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    /* the i/j/values arrays were handed over by MatCreateMPIAdj(), so the
       matrix owns them and must free them */
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
92 
93 #undef __FUNC__
94 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
98 
99   PetscFunctionBegin;
100   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
101     a->symmetric = PETSC_TRUE;
102   } else {
103     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNC__
114 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
115 int MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
118   int        i,j,*diag,m = a->m;
119 
120   PetscFunctionBegin;
121   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
122   PLogObjectMemory(A,(m+1)*sizeof(int));
123   for (i=0; i<a->m; i++) {
124     for (j=a->i[i]; j<a->i[i+1]; j++) {
125       if (a->j[j] == i) {
126         diag[i] = j;
127         break;
128       }
129     }
130   }
131   a->diag = diag;
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNC__
136 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
137 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
138 {
139   PetscFunctionBegin;
140   if (m) *m = A->M;
141   if (n) *n = A->N;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNC__
146 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
147 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
150   PetscFunctionBegin;
151   if (m) *m = a->m;
152   if (n) *n = A->N;
153   PetscFunctionReturn(0);
154 }
155 
156 #undef __FUNC__
157 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
158 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
159 {
160   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
161   PetscFunctionBegin;
162   if (m) *m = a->rstart;
163   if (n) *n = a->rend;
164   PetscFunctionReturn(0);
165 }
166 
167 #undef __FUNC__
168 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
169 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
170 {
171   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
172   int        *itmp;
173 
174   PetscFunctionBegin;
175   row -= a->rstart;
176 
177   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
178 
179   *nz = a->i[row+1] - a->i[row];
180   if (v) *v = PETSC_NULL;
181   if (idx) {
182     itmp = a->j + a->i[row];
183     if (*nz) {
184       *idx = itmp;
185     }
186     else *idx = 0;
187   }
188   PetscFunctionReturn(0);
189 }
190 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj().  Intentionally a
    no-op: MatGetRow_MPIAdj() hands back pointers directly into the stored
    adjacency arrays and allocates nothing, so there is nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
198 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - An adjacency matrix has no block structure,
    so the block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
207 
208 
209 #undef __FUNC__
210 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
211 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
212 {
213   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
214   int         ierr;
215   PetscTruth  flag;
216 
217   PetscFunctionBegin;
218   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
219 
220   /* If the  matrix dimensions are not equal,or no of nonzeros */
221   if ((a->m != b->m) ||(a->nz != b->nz)) {
222     flag = PETSC_FALSE;
223   }
224 
225   /* if the a->i are the same */
226   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
227 
228   /* if a->j are the same */
229   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
230 
231   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
235 
/* -------------------------------------------------------------------*/
/*
   Operation table for the MPIAdj matrix type.  Only the operations that
   make sense for a values-free adjacency structure are provided; every 0
   slot means "not supported" and the Mat interface errors out if that
   operation is requested.
   NOTE(review): entries are positional -- they must line up with
   struct _MatOps in the Mat private header; verify against that header
   before adding, removing, or moving any entry.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
303 
304 
305 #undef __FUNC__
306 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
307 /*@C
308    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
309    The matrix does not have numerical values associated with it, but is
310    intended for ordering (to reduce bandwidth etc) and partitioning.
311 
312    Collective on MPI_Comm
313 
314    Input Parameters:
315 +  comm - MPI communicator
316 .  m - number of local rows
317 .  n - number of columns
318 .  i - the indices into j for the start of each row
319 .  j - the column indices for each row (sorted for each row).
320        The indices in i and j start with zero (NOT with one).
321 -  values -[optional] edge weights
322 
323    Output Parameter:
324 .  A - the matrix
325 
326    Level: intermediate
327 
328    Notes: This matrix object does not support most matrix operations, include
329    MatSetValues().
330    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
331    when the matrix is destroyed. And you must allocate them with PetscMalloc(). If you
332     call from Fortran you need not create the arrays with PetscMalloc().
333    Should not include the matrix diagonals.
334 
335    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
336 
337 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
338 @*/
339 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
340 {
341   Mat        B;
342   Mat_MPIAdj *b;
343   int        ii,ierr,size,rank;
344   PetscTruth flg;
345 
346   PetscFunctionBegin;
347   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
348   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
349 
350   *A                  = 0;
351   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
352   PLogObjectCreate(B);
353   B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
354   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
355   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
356   B->factor           = 0;
357   B->lupivotthreshold = 1.0;
358   B->mapping          = 0;
359   B->assembled        = PETSC_FALSE;
360 
361   b->m = m; B->m = m;
362   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
363   B->n = n; B->N = n;
364 
365   /* the information in the maps duplicates the information computed below, eventually
366      we should remove the duplicate information that is not contained in the maps */
367   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
368   /* we don't know the "local columns" so just use the row information :-(*/
369   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
370 
371   b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
372   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
373   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
374   b->rowners[0] = 0;
375   for (ii=2; ii<=size; ii++) {
376     b->rowners[ii] += b->rowners[ii-1];
377   }
378   b->rstart = b->rowners[rank];
379   b->rend   = b->rowners[rank+1];
380 
381 #if defined(PETSC_USE_BOPT_g)
382   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
383   for (ii=1; ii<m; ii++) {
384     if (i[ii] < 0 || i[ii] < i[ii-1]) {
385       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
386     }
387   }
388   for (ii=0; ii<i[m]; ii++) {
389     if (j[ii] < 0 || j[ii] >= B->N) {
390       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
391     }
392   }
393 #endif
394 
395   b->j  = j;
396   b->i  = i;
397   b->values = values;
398 
399   b->nz               = i[m];
400   b->diag             = 0;
401   b->symmetric        = PETSC_FALSE;
402   b->freeaij          = PETSC_TRUE;
403   *A = B;
404 
405   ierr = OptionsHasName(PETSC_NULL,"-help",&flg);CHKERRQ(ierr);
406   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
407   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
408   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
409   PetscFunctionReturn(0);
410 }
411 
412 
413 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatConvert_MPIAdj"
/*
    MatConvert_MPIAdj - Builds an MPIAdj adjacency matrix from any matrix
    type that supports MatGetRow(): diagonal entries are dropped, column
    indices become the adjacency lists, and |value| truncated to int is
    stored as the edge weight.

    NOTE(review): the 'type' parameter is never read here -- the result is
    always created with MatCreateMPIAdj(); confirm callers expect that.
*/
int MatConvert_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    /* NOTE(review): len may have been decremented above before being handed
       back to MatRestoreRow(); harmless when the restore ignores it, but
       worth confirming for the actual type of A */
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  /* NOTE(review): ia is sized from the global column count N although only
     m+1 entries are written below -- presumably an over-allocation; verify */
  a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
  ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
  ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);

  /* second pass: fill the weight (a), offset (ia) and index (ja) arrays */
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* the new matrix takes ownership of ia, ja and a (freed at its destroy) */
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
463 
464 
465 
466 
467 
468 
469