xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 0400557ab3aee7708bad13e858e06fda0130decb)
1 /*$Id: mpiadj.c,v 1.42 2000/07/10 03:39:47 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = a->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else {
23     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
24     for (i=0; i<m; i++) {
25       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
26       for (j=a->i[i]; j<a->i[i+1]; j++) {
27         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
28       }
29       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
30     }
31     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
32   }
33   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNC__
38 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
39 int MatView_MPIAdj(Mat A,Viewer viewer)
40 {
41   int        ierr;
42   PetscTruth isascii;
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
48   } else {
49     SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
54 #undef __FUNC__
55 #define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
56 int MatDestroy_MPIAdj(Mat mat)
57 {
58   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
59   int        ierr;
60 
61   PetscFunctionBegin;
62 
63   if (mat->mapping) {
64     ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
65   }
66   if (mat->bmapping) {
67     ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
68   }
69   if (mat->rmap) {
70     ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
71   }
72   if (mat->cmap) {
73     ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
74   }
75 
76 #if defined(PETSC_USE_LOG)
77   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
78 #endif
79   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
80   if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
81   if (a->freeaij) {
82     ierr = PetscFree(a->i);CHKERRQ(ierr);
83     ierr = PetscFree(a->j);CHKERRQ(ierr);
84     ierr = PetscFree(a);CHKERRQ(ierr);
85   }
86   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
87 
88   PLogObjectDestroy(mat);
89   PetscHeaderDestroy(mat);
90   PetscFunctionReturn(0);
91 }
92 
93 #undef __FUNC__
94 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
98 
99   PetscFunctionBegin;
100   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
101     a->symmetric = PETSC_TRUE;
102   } else {
103     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNC__
114 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
115 int MatMarkDiagonal_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
118   int        i,j,*diag,m = a->m;
119 
120   PetscFunctionBegin;
121   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
122   PLogObjectMemory(A,(m+1)*sizeof(int));
123   for (i=0; i<a->m; i++) {
124     for (j=a->i[i]; j<a->i[i+1]; j++) {
125       if (a->j[j] == i) {
126         diag[i] = j;
127         break;
128       }
129     }
130   }
131   a->diag = diag;
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNC__
136 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
137 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
138 {
139   PetscFunctionBegin;
140   if (m) *m = A->M;
141   if (n) *n = A->N;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNC__
146 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPIAdj"
147 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
150   PetscFunctionBegin;
151   if (m) *m = a->m;
152   if (n) *n = A->N;
153   PetscFunctionReturn(0);
154 }
155 
156 #undef __FUNC__
157 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
158 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
159 {
160   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
161   PetscFunctionBegin;
162   if (m) *m = a->rstart;
163   if (n) *n = a->rend;
164   PetscFunctionReturn(0);
165 }
166 
167 #undef __FUNC__
168 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
169 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
170 {
171   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
172   int        *itmp;
173 
174   PetscFunctionBegin;
175   row -= a->rstart;
176 
177   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
178 
179   *nz = a->i[row+1] - a->i[row];
180   if (v) *v = PETSC_NULL;
181   if (idx) {
182     itmp = a->j + a->i[row];
183     if (*nz) {
184       *idx = itmp;
185     }
186     else *idx = 0;
187   }
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNC__
192 #define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
193 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
194 {
195   PetscFunctionBegin;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNC__
200 #define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
201 int MatGetBlockSize_MPIAdj(Mat A,int *bs)
202 {
203   PetscFunctionBegin;
204   *bs = 1;
205   PetscFunctionReturn(0);
206 }
207 
208 
209 #undef __FUNC__
210 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
211 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
212 {
213   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
214   int         ierr;
215   PetscTruth  flag;
216 
217   PetscFunctionBegin;
218   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
219 
220   /* If the  matrix dimensions are not equal,or no of nonzeros */
221   if ((a->m != b->m) ||(a->nz != b->nz)) {
222     flag = PETSC_FALSE;
223   }
224 
225   /* if the a->i are the same */
226   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
227 
228   /* if a->j are the same */
229   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
230 
231   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
235 
/* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  Entries are positional
   (order fixed by struct _MatOps); only the operations meaningful for an
   adjacency structure are filled in, all other slots are 0 and the
   corresponding Mat operations are unavailable for this type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
303 
304 
305 #undef __FUNC__
306 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
307 /*@C
308    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
309    The matrix does not have numerical values associated with it, but is
310    intended for ordering (to reduce bandwidth etc) and partitioning.
311 
312    Collective on MPI_Comm
313 
314    Input Parameters:
315 +  comm - MPI communicator
316 .  m - number of local rows
317 .  n - number of columns
318 .  i - the indices into j for the start of each row
319 .  j - the column indices for each row (sorted for each row).
320        The indices in i and j start with zero (NOT with one).
321 -  values -[optional] edge weights
322 
323    Output Parameter:
324 .  A - the matrix
325 
326    Level: intermediate
327 
328    Notes: This matrix object does not support most matrix operations, include
329    MatSetValues().
330    You must NOT free the ii, values and jj arrays yourself. PETSc will free them
331    when the matrix is destroyed. And you must allocate them with PetscMalloc()
332 
333    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
334 
335 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
336 @*/
337 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
338 {
339   Mat        B;
340   Mat_MPIAdj *b;
341   int        ii,ierr,size,rank;
342   PetscTruth flg;
343 
344   PetscFunctionBegin;
345   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
346   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
347 
348   *A                  = 0;
349   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
350   PLogObjectCreate(B);
351   B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
352   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
353   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
354   B->factor           = 0;
355   B->lupivotthreshold = 1.0;
356   B->mapping          = 0;
357   B->assembled        = PETSC_FALSE;
358 
359   b->m = m; B->m = m;
360   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
361   B->n = n; B->N = n;
362 
363   /* the information in the maps duplicates the information computed below, eventually
364      we should remove the duplicate information that is not contained in the maps */
365   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
366   /* we don't know the "local columns" so just use the row information :-(*/
367   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
368 
369   b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
370   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
371   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
372   b->rowners[0] = 0;
373   for (ii=2; ii<=size; ii++) {
374     b->rowners[ii] += b->rowners[ii-1];
375   }
376   b->rstart = b->rowners[rank];
377   b->rend   = b->rowners[rank+1];
378 
379 #if defined(PETSC_BOPT_g)
380   if (i[0] != 0) SETERR1(1,1,"First i[] index must be zero, instead it is %d\n",i[0]);
381   for (ii=1; ii<m; ii++) {
382     if (i[ii] < 0 || i[ii] > i[ii-1]) {
383       SETERRQ4(1,1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
384     }
385   }
386   for (ii=0; ii<i[m]; ii++) {
387     if (i[ii] < 0 || i[ii] >= N) {
388       SETERRQ2(1,1,"Column index %d out of range %d\n",ii,i[ii]);
389     }
390   }
391 #endif
392 
393   b->j  = j;
394   b->i  = i;
395   b->values = values;
396 
397   b->nz               = i[m];
398   b->diag             = 0;
399   b->symmetric        = PETSC_FALSE;
400   b->freeaij          = PETSC_TRUE;
401   *A = B;
402 
403   ierr = OptionsHasName(PETSC_NULL,"-help",&flg);CHKERRQ(ierr);
404   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
405   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
406   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
407   PetscFunctionReturn(0);
408 }
409 
410 
411 
412 #undef __FUNC__
413 #define __FUNC__ /*<a name=""></a>*/"MatConvert_MPIAdj"
414 int MatConvert_MPIAdj(Mat A,MatType type,Mat *B)
415 {
416   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
417   Scalar   *ra;
418   MPI_Comm comm;
419 
420   PetscFunctionBegin;
421   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
422   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
423   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
424 
425   /* count the number of nonzeros per row */
426   for (i=0; i<m; i++) {
427     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
428     for (j=0; j<len; j++) {
429       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
430     }
431     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
432     nzeros += len;
433   }
434 
435   /* malloc space for nonzeros */
436   a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
437   ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
438   ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);
439 
440   nzeros = 0;
441   ia[0]  = 0;
442   for (i=0; i<m; i++) {
443     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
444     cnt     = 0;
445     for (j=0; j<len; j++) {
446       if (rj[j] != i+rstart) { /* if not diagonal */
447         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
448         ja[nzeros+cnt++] = rj[j];
449       }
450     }
451     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
452     nzeros += cnt;
453     ia[i+1] = nzeros;
454   }
455 
456   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
457   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
458 
459   PetscFunctionReturn(0);
460 }
461 
462 
463 
464 
465 
466 
467