xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision fd16b1779ca341c090fa91c269e2c2dd5dd0b73d)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.26 1999/09/02 14:53:32 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   FILE        *fd;
18   char        *outputname;
19   MPI_Comm    comm = A->comm;
20 
21   PetscFunctionBegin;
22   ierr = ViewerASCIIGetPointer(viewer,&fd);CHKERRQ(ierr);
23   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
24   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
25   if (format == VIEWER_FORMAT_ASCII_INFO) {
26     PetscFunctionReturn(0);
27   } else {
28     for ( i=0; i<m; i++ ) {
29       ierr = PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);CHKERRQ(ierr);
30       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
31         ierr = PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);CHKERRQ(ierr);
32       }
33       ierr = PetscSynchronizedFPrintf(comm,fd,"\n");CHKERRQ(ierr);
34     }
35   }
36   ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNC__
41 #define __FUNC__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,Viewer viewer)
43 {
44   ViewerType  vtype;
45   int         ierr;
46 
47   PetscFunctionBegin;
48   ierr = ViewerGetType(viewer,&vtype);CHKERRQ(ierr);
49   if (PetscTypeCompare(vtype,ASCII_VIEWER)){
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ(1,1,"Viewer type not supported by PETSc object");
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNC__
58 #define __FUNC__ "MatDestroy_MPIAdj"
59 int MatDestroy_MPIAdj(Mat mat)
60 {
61   Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
62   int        ierr;
63 
64   PetscFunctionBegin;
65 
66   if (mat->mapping) {
67     ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
68   }
69   if (mat->bmapping) {
70     ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
71   }
72   if (mat->rmap) {
73     ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
74   }
75   if (mat->cmap) {
76     ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
77   }
78 
79 #if defined(PETSC_USE_LOG)
80   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
81 #endif
82   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
83   ierr = PetscFree(a->i);CHKERRQ(ierr);
84   ierr = PetscFree(a->j);CHKERRQ(ierr);
85   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
86   ierr = PetscFree(a);CHKERRQ(ierr);
87 
88   PLogObjectDestroy(mat);
89   PetscHeaderDestroy(mat);
90   PetscFunctionReturn(0);
91 }
92 
93 
94 #undef __FUNC__
95 #define __FUNC__ "MatSetOption_MPIAdj"
96 int MatSetOption_MPIAdj(Mat A,MatOption op)
97 {
98   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
99 
100   PetscFunctionBegin;
101   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
102     a->symmetric = PETSC_TRUE;
103   } else {
104     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 
110 /*
111      Adds diagonal pointers to sparse matrix structure.
112 */
113 
114 #undef __FUNC__
115 #define __FUNC__ "MatMarkDiag_MPIAdj"
116 int MatMarkDiag_MPIAdj(Mat A)
117 {
118   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
119   int        i,j, *diag, m = a->m;
120 
121   PetscFunctionBegin;
122   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
123   PLogObjectMemory(A,(m+1)*sizeof(int));
124   for ( i=0; i<a->m; i++ ) {
125     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
126       if (a->j[j] == i) {
127         diag[i] = j;
128         break;
129       }
130     }
131   }
132   a->diag = diag;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNC__
137 #define __FUNC__ "MatGetSize_MPIAdj"
138 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
139 {
140   PetscFunctionBegin;
141   if (m) *m = A->M;
142   if (n) *n = A->N;
143   PetscFunctionReturn(0);
144 }
145 
146 #undef __FUNC__
147 #define __FUNC__ "MatGetSize_MPIAdj"
148 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
149 {
150   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
151   PetscFunctionBegin;
152   if (m) *m = a->m;
153   if (n) *n = A->N;
154   PetscFunctionReturn(0);
155 }
156 
157 #undef __FUNC__
158 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
159 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
160 {
161   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
162   PetscFunctionBegin;
163   *m = a->rstart; *n = a->rend;
164   PetscFunctionReturn(0);
165 }
166 
167 #undef __FUNC__
168 #define __FUNC__ "MatGetRow_MPIAdj"
169 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
170 {
171   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
172   int        *itmp;
173 
174   PetscFunctionBegin;
175   row -= a->rstart;
176 
177   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
178 
179   *nz = a->i[row+1] - a->i[row];
180   if (v) *v = PETSC_NULL;
181   if (idx) {
182     itmp = a->j + a->i[row];
183     if (*nz) {
184       *idx = itmp;
185     }
186     else *idx = 0;
187   }
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNC__
192 #define __FUNC__ "MatRestoreRow_MPIAdj"
193 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
194 {
195   PetscFunctionBegin;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNC__
200 #define __FUNC__ "MatGetBlockSize_MPIAdj"
201 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
202 {
203   PetscFunctionBegin;
204   *bs = 1;
205   PetscFunctionReturn(0);
206 }
207 
208 
209 #undef __FUNC__
210 #define __FUNC__ "MatEqual_MPIAdj"
211 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
212 {
213   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
214  int         flag = 1,ierr;
215 
216   PetscFunctionBegin;
217   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
218 
219   /* If the  matrix dimensions are not equal, or no of nonzeros */
220   if ((a->m != b->m ) ||( a->nz != b->nz)) {
221     flag = 0;
222   }
223 
224   /* if the a->i are the same */
225   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
226     flag = 0;
227   }
228 
229   /* if a->j are the same */
230   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
231     flag = 0;
232   }
233 
234   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
235 
236 
237   PetscFunctionReturn(0);
238 }
239 
240 
241 /* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  Entries are positional; a 0
    means the corresponding Mat operation is unsupported (MPIAdj stores no
    numerical values, so most operations are meaningless for it).
    NOTE(review): slot meanings follow the _MatOps layout of this PETSc
    version's Mat implementation header -- confirm against that header
    before adding or moving entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
308 
309 
#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
-  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc will free them
   when the matrix is destroyed (so they must have been obtained with
   PetscMalloc()).

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
{
  Mat        B;
  Mat_MPIAdj *b;
  int        ii,ierr, flg,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  *A                  = 0;
  PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
  PLogObjectCreate(B);
  B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  /* install the (mostly empty) MPIAdj operation table */
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->destroy     = MatDestroy_MPIAdj;
  B->ops->view        = MatView_MPIAdj;
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  b->m = m; B->m = m;
  ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  B->n = n; B->N = n;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p] = first global row owned by process p; built by gathering
     each process's local row count and prefix-summing */
  b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for ( ii=2; ii<=size; ii++ ) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  /* ownership of the caller's index arrays transfers to the matrix; they
     are freed in MatDestroy_MPIAdj() */
  b->j  = j;
  b->i  = i;

  b->nz               = i[m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;

  *A = B;

  ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
  if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
399 
400 
401 
402 
403 
404 
405 
406 
407 
408