xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 6831982abb6453c2f3e25efecb5d0951d333371e)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.29 1999/10/13 20:37:34 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   char        *outputname;
18 
19   PetscFunctionBegin;
20   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
21   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
22   if (format == VIEWER_FORMAT_ASCII_INFO) {
23     PetscFunctionReturn(0);
24   } else {
25     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for ( i=0; i<m; i++ ) {
27       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
29         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ "MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   int        ierr;
44   PetscTruth isascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
48   if (isascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Frees all storage of an MPIAdj matrix, including the
    i/j index arrays whose ownership was taken by MatCreateMPIAdj().
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;

  /* release optional mapping/map objects attached to the matrix header */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* a->diag is only allocated by MatMarkDiag_MPIAdj(), hence the guard */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  /* destroy the PETSc object header itself; must come after freeing a */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
91 
92 
93 #undef __FUNC__
94 #define __FUNC__ "MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
98 
99   PetscFunctionBegin;
100   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
101     a->symmetric = PETSC_TRUE;
102   } else {
103     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
104   }
105   PetscFunctionReturn(0);
106 }
107 
108 
109 /*
110      Adds diagonal pointers to sparse matrix structure.
111 */
112 
113 #undef __FUNC__
114 #define __FUNC__ "MatMarkDiag_MPIAdj"
115 int MatMarkDiag_MPIAdj(Mat A)
116 {
117   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
118   int        i,j, *diag, m = a->m;
119 
120   PetscFunctionBegin;
121   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
122   PLogObjectMemory(A,(m+1)*sizeof(int));
123   for ( i=0; i<a->m; i++ ) {
124     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
125       if (a->j[j] == i) {
126         diag[i] = j;
127         break;
128       }
129     }
130   }
131   a->diag = diag;
132   PetscFunctionReturn(0);
133 }
134 
135 #undef __FUNC__
136 #define __FUNC__ "MatGetSize_MPIAdj"
137 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
138 {
139   PetscFunctionBegin;
140   if (m) *m = A->M;
141   if (n) *n = A->N;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNC__
146 #define __FUNC__ "MatGetSize_MPIAdj"
147 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
150   PetscFunctionBegin;
151   if (m) *m = a->m;
152   if (n) *n = A->N;
153   PetscFunctionReturn(0);
154 }
155 
156 #undef __FUNC__
157 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
158 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
159 {
160   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
161   PetscFunctionBegin;
162   *m = a->rstart; *n = a->rend;
163   PetscFunctionReturn(0);
164 }
165 
166 #undef __FUNC__
167 #define __FUNC__ "MatGetRow_MPIAdj"
168 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
169 {
170   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
171   int        *itmp;
172 
173   PetscFunctionBegin;
174   row -= a->rstart;
175 
176   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
177 
178   *nz = a->i[row+1] - a->i[row];
179   if (v) *v = PETSC_NULL;
180   if (idx) {
181     itmp = a->j + a->i[row];
182     if (*nz) {
183       *idx = itmp;
184     }
185     else *idx = 0;
186   }
187   PetscFunctionReturn(0);
188 }
189 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj(); intentionally a
    no-op because MatGetRow_MPIAdj() returns pointers into the matrix's own
    storage and allocates nothing.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
197 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - Adjacency matrices are not blocked, so the
    block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
206 
207 
208 #undef __FUNC__
209 #define __FUNC__ "MatEqual_MPIAdj"
210 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
211 {
212   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
213   int         ierr;
214   PetscTruth  flag;
215 
216   PetscFunctionBegin;
217   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
218 
219   /* If the  matrix dimensions are not equal, or no of nonzeros */
220   if ((a->m != b->m ) ||( a->nz != b->nz)) {
221     flag = PETSC_FALSE;
222   }
223 
224   /* if the a->i are the same */
225   ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
226 
227   /* if a->j are the same */
228   ierr = PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
229 
230   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
231   PetscFunctionReturn(0);
232 }
233 

/* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  Most slots are 0 (null)
    because an adjacency matrix carries no numerical values and so supports
    only structural operations.  The slot order is positional and must
    match struct _MatOps in the PETSc matrix headers - do not reorder.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
302 

#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
-  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc will free them
   when the matrix is destroyed.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
{
  Mat        B;
  Mat_MPIAdj *b;
  int        ii,ierr, flg,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  *A                  = 0;
  /* allocate the PETSc object header and install the function table */
  PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
  PLogObjectCreate(B);
  B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->destroy     = MatDestroy_MPIAdj;
  B->ops->view        = MatView_MPIAdj;
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts over all ranks */
  b->m = m; B->m = m;
  ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  B->n = n; B->N = n;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[r] is the global index of the first row owned by rank r;
     built by gathering every rank's m and taking a running (prefix) sum */
  b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for ( ii=2; ii<=size; ii++ ) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  /* take ownership of the caller's index arrays; freed in MatDestroy_MPIAdj */
  b->j  = j;
  b->i  = i;

  b->nz               = i[m];   /* i[m] is one past the last entry of the last row */
  b->diag             = 0;      /* filled lazily by MatMarkDiag_MPIAdj() */
  b->symmetric        = PETSC_FALSE;

  *A = B;

  ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
  if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
393 
394 
395 
396 
397 
398 
399 
400 
401 
402