xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision ac2a4f0d24b3b6a4ee93edbcad41f4bb9e923944)
1 /*$Id: mpiadj.c,v 1.29 1999/10/13 20:37:34 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "sys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ "MatView_MPIAdj_ASCII"
11 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
14   int         ierr, i,j, m = a->m,  format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else {
23     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
24     for ( i=0; i<m; i++ ) {
25       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
26       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
27         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
28       }
29       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
30     }
31     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
32   }
33   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNC__
38 #define __FUNC__ "MatView_MPIAdj"
39 int MatView_MPIAdj(Mat A,Viewer viewer)
40 {
41   int        ierr;
42   PetscTruth isascii;
43 
44   PetscFunctionBegin;
45   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
46   if (isascii) {
47     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
48   } else {
49     SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
50   }
51   PetscFunctionReturn(0);
52 }
53 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
   MatDestroy_MPIAdj - Frees all storage associated with an MPIAdj matrix:
   the optional mappings/maps on the Mat header, the adjacency arrays
   (i, j, diag, rowners), and finally the Mat header itself.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;

  /* the mappings and maps are optional; destroy only those that were created */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* diag is built lazily by MatMarkDiag_MPIAdj(), so it may be absent */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  /* the i and j arrays were supplied by the user in MatCreateMPIAdj(),
     which documents that ownership passes to the matrix, so free them here */
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
89 
90 
91 #undef __FUNC__
92 #define __FUNC__ "MatSetOption_MPIAdj"
93 int MatSetOption_MPIAdj(Mat A,MatOption op)
94 {
95   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
96 
97   PetscFunctionBegin;
98   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
99     a->symmetric = PETSC_TRUE;
100   } else {
101     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
102   }
103   PetscFunctionReturn(0);
104 }
105 
106 
107 /*
108      Adds diagonal pointers to sparse matrix structure.
109 */
110 
111 #undef __FUNC__
112 #define __FUNC__ "MatMarkDiag_MPIAdj"
113 int MatMarkDiag_MPIAdj(Mat A)
114 {
115   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
116   int        i,j, *diag, m = a->m;
117 
118   PetscFunctionBegin;
119   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
120   PLogObjectMemory(A,(m+1)*sizeof(int));
121   for ( i=0; i<a->m; i++ ) {
122     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
123       if (a->j[j] == i) {
124         diag[i] = j;
125         break;
126       }
127     }
128   }
129   a->diag = diag;
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNC__
134 #define __FUNC__ "MatGetSize_MPIAdj"
135 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
136 {
137   PetscFunctionBegin;
138   if (m) *m = A->M;
139   if (n) *n = A->N;
140   PetscFunctionReturn(0);
141 }
142 
143 #undef __FUNC__
144 #define __FUNC__ "MatGetSize_MPIAdj"
145 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
146 {
147   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
148   PetscFunctionBegin;
149   if (m) *m = a->m;
150   if (n) *n = A->N;
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNC__
155 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
156 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
157 {
158   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
159   PetscFunctionBegin;
160   *m = a->rstart; *n = a->rend;
161   PetscFunctionReturn(0);
162 }
163 
164 #undef __FUNC__
165 #define __FUNC__ "MatGetRow_MPIAdj"
166 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
167 {
168   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
169   int        *itmp;
170 
171   PetscFunctionBegin;
172   row -= a->rstart;
173 
174   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
175 
176   *nz = a->i[row+1] - a->i[row];
177   if (v) *v = PETSC_NULL;
178   if (idx) {
179     itmp = a->j + a->i[row];
180     if (*nz) {
181       *idx = itmp;
182     }
183     else *idx = 0;
184   }
185   PetscFunctionReturn(0);
186 }
187 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj(); intentionally a
   no-op because MatGetRow_MPIAdj() hands back pointers into the stored
   arrays rather than allocating anything.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
195 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices are always treated as having
   block size one.
*/
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
204 
205 
206 #undef __FUNC__
207 #define __FUNC__ "MatEqual_MPIAdj"
208 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
209 {
210   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
211   int         ierr;
212   PetscTruth  flag;
213 
214   PetscFunctionBegin;
215   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
216 
217   /* If the  matrix dimensions are not equal, or no of nonzeros */
218   if ((a->m != b->m ) ||( a->nz != b->nz)) {
219     flag = PETSC_FALSE;
220   }
221 
222   /* if the a->i are the same */
223   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
224 
225   /* if a->j are the same */
226   ierr = PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
227 
228   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
232 
/* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  The entries are positional
   (the slot order is fixed by struct _MatOps in the Mat headers); a 0
   marks an operation this matrix type does not support.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
300 
301 
#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
-  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc will free them
   when the matrix is destroyed.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
{
  Mat        B;
  Mat_MPIAdj *b;
  int        ii,ierr, flg,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  *A                  = 0;
  PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
  PLogObjectCreate(B);
  B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->destroy     = MatDestroy_MPIAdj;
  B->ops->view        = MatView_MPIAdj;
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* the global row count is the sum of the local row counts */
  b->m = m; B->m = m;
  ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  B->n = n; B->N = n;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p]..rowners[p+1] will bound the rows owned by process p */
  b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  /* NOTE(review): logs (size+2) ints but (size+1) are allocated above --
     confirm whether the extra int is intentional accounting */
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* turn per-process counts into a prefix sum; starting at ii=2 is correct
     because rowners[1] already holds the count for process 0 */
  for ( ii=2; ii<=size; ii++ ) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  /* ownership of the caller-provided arrays passes to the matrix; they are
     freed in MatDestroy_MPIAdj() (see the manual page Notes above) */
  b->j  = j;
  b->i  = i;

  /* i[m] is the total number of stored column indices */
  b->nz               = i[m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;

  *A = B;

  ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
  if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
391 
392 
393 
394 
395 
396 
397 
398 
399 
400