xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 454a90a3eb7bca6958262e5eca1eb393ad97e108)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.27 1999/09/20 19:25:06 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   FILE        *fd;
18   char        *outputname;
19   MPI_Comm    comm = A->comm;
20 
21   PetscFunctionBegin;
22   ierr = ViewerASCIIGetPointer(viewer,&fd);CHKERRQ(ierr);
23   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
24   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
25   if (format == VIEWER_FORMAT_ASCII_INFO) {
26     PetscFunctionReturn(0);
27   } else {
28     for ( i=0; i<m; i++ ) {
29       ierr = PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);CHKERRQ(ierr);
30       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
31         ierr = PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscSynchronizedFPrintf(comm,fd,"\n");CHKERRQ(ierr);
34     }
35   }
36   ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNC__
41 #define __FUNC__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,Viewer viewer)
43 {
44   int         ierr;
45 
46   PetscFunctionBegin;
47   if (PetscTypeCompare(viewer,ASCII_VIEWER)){
48     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
49   } else {
50     SETERRQ(1,1,"Viewer type not supported by PETSc object");
51   }
52   PetscFunctionReturn(0);
53 }
54 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    Frees all storage associated with an MPIAdj matrix: any mappings/maps
  attached to the Mat header, the adjacency arrays i and j (which PETSc
  took ownership of in MatCreateMPIAdj()), and finally the object header
  itself.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;

  /* these header fields are only present if the user attached them */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* a->diag is allocated lazily by MatMarkDiag_MPIAdj(), hence the guard */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  /* the header must be destroyed last; a is dangling after this point */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
90 
91 
92 #undef __FUNC__
93 #define __FUNC__ "MatSetOption_MPIAdj"
94 int MatSetOption_MPIAdj(Mat A,MatOption op)
95 {
96   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
97 
98   PetscFunctionBegin;
99   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
100     a->symmetric = PETSC_TRUE;
101   } else {
102     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNC__
113 #define __FUNC__ "MatMarkDiag_MPIAdj"
114 int MatMarkDiag_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
117   int        i,j, *diag, m = a->m;
118 
119   PetscFunctionBegin;
120   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
121   PLogObjectMemory(A,(m+1)*sizeof(int));
122   for ( i=0; i<a->m; i++ ) {
123     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
124       if (a->j[j] == i) {
125         diag[i] = j;
126         break;
127       }
128     }
129   }
130   a->diag = diag;
131   PetscFunctionReturn(0);
132 }
133 
134 #undef __FUNC__
135 #define __FUNC__ "MatGetSize_MPIAdj"
136 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
137 {
138   PetscFunctionBegin;
139   if (m) *m = A->M;
140   if (n) *n = A->N;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNC__
145 #define __FUNC__ "MatGetSize_MPIAdj"
146 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
147 {
148   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
149   PetscFunctionBegin;
150   if (m) *m = a->m;
151   if (n) *n = A->N;
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNC__
156 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
157 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
158 {
159   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
160   PetscFunctionBegin;
161   *m = a->rstart; *n = a->rend;
162   PetscFunctionReturn(0);
163 }
164 
165 #undef __FUNC__
166 #define __FUNC__ "MatGetRow_MPIAdj"
167 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
168 {
169   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
170   int        *itmp;
171 
172   PetscFunctionBegin;
173   row -= a->rstart;
174 
175   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
176 
177   *nz = a->i[row+1] - a->i[row];
178   if (v) *v = PETSC_NULL;
179   if (idx) {
180     itmp = a->j + a->i[row];
181     if (*nz) {
182       *idx = itmp;
183     }
184     else *idx = 0;
185   }
186   PetscFunctionReturn(0);
187 }
188 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
    MatGetRow_MPIAdj() hands out pointers directly into the matrix
  storage and allocates nothing, so there is nothing to release here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
196 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
    Adjacency matrices are never blocked; the block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
205 
206 
207 #undef __FUNC__
208 #define __FUNC__ "MatEqual_MPIAdj"
209 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
210 {
211   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
212  int         flag = 1,ierr;
213 
214   PetscFunctionBegin;
215   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
216 
217   /* If the  matrix dimensions are not equal, or no of nonzeros */
218   if ((a->m != b->m ) ||( a->nz != b->nz)) {
219     flag = 0;
220   }
221 
222   /* if the a->i are the same */
223   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
224     flag = 0;
225   }
226 
227   /* if a->j are the same */
228   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
229     flag = 0;
230   }
231 
232   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
233 
234 
235   PetscFunctionReturn(0);
236 }
237 
238 
/* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  The table is POSITIONAL:
  each slot corresponds to a fixed entry of struct _MatOps, and a 0 marks
  an operation this type does not support.  Do not reorder or remove
  entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
306 
307 
#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
-  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc takes ownership
   of them and will free them when the matrix is destroyed.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
{
  Mat        B;
  Mat_MPIAdj *b;
  int        ii,ierr, flg,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  *A                  = 0;
  PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
  PLogObjectCreate(B);
  B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->destroy     = MatDestroy_MPIAdj;
  B->ops->view        = MatView_MPIAdj;
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local ones; columns are not
     distributed, so local n and global N coincide */
  b->m = m; B->m = m;
  ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  B->n = n; B->N = n;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);

  /* gather every process's local row count into rowners[1..size],
     then turn it into a prefix sum so that rowners[rank] is the first
     global row owned by that rank */
  b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for ( ii=2; ii<=size; ii++ ) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  /* take ownership of the caller's arrays; they are freed in MatDestroy_MPIAdj() */
  b->j  = j;
  b->i  = i;

  b->nz               = i[m];
  b->diag             = 0;    /* built lazily by MatMarkDiag_MPIAdj() */
  b->symmetric        = PETSC_FALSE;

  *A = B;

  ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
  if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
397 
398 
399 
400 
401 
402 
403 
404 
405 
406