xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 606d414c6e034e02e67059b83ebaefc3ebe99698)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.24 1999/05/12 03:30:08 bsmith Exp balay $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   FILE        *fd;
18   char        *outputname;
19   MPI_Comm    comm = A->comm;
20 
21   ierr = ViewerASCIIGetPointer(viewer,&fd);CHKERRQ(ierr);
22   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
23   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
24   if (format == VIEWER_FORMAT_ASCII_INFO) {
25     PetscFunctionReturn(0);
26   } else {
27     for ( i=0; i<m; i++ ) {
28       ierr = PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
30         ierr = PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscSynchronizedFPrintf(comm,fd,"\n");CHKERRQ(ierr);
33     }
34   }
35   ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ "MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   ViewerType  vtype;
44   int         ierr;
45 
46   ierr = ViewerGetType(viewer,&vtype);CHKERRQ(ierr);
47   if (PetscTypeCompare(vtype,ASCII_VIEWER)){
48     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
49   } else {
50     SETERRQ(1,1,"Viewer type not supported by PETSc object");
51   }
52   PetscFunctionReturn(0);
53 }
54 
55 #undef __FUNC__
56 #define __FUNC__ "MatDestroy_MPIAdj"
57 int MatDestroy_MPIAdj(Mat mat)
58 {
59   Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
60   int        ierr;
61 
62   PetscFunctionBegin;
63   if (--mat->refct > 0) PetscFunctionReturn(0);
64 
65   if (mat->mapping) {
66     ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
67   }
68   if (mat->bmapping) {
69     ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
70   }
71   if (mat->rmap) {
72     ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
73   }
74   if (mat->cmap) {
75     ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
76   }
77 
78 #if defined(PETSC_USE_LOG)
79   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
80 #endif
81   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
82   ierr = PetscFree(a->i);CHKERRQ(ierr);
83   ierr = PetscFree(a->j);CHKERRQ(ierr);
84   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
85   ierr = PetscFree(a);CHKERRQ(ierr);
86 
87   PLogObjectDestroy(mat);
88   PetscHeaderDestroy(mat);
89   PetscFunctionReturn(0);
90 }
91 
92 
93 #undef __FUNC__
94 #define __FUNC__ "MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
98 
99   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
100     a->symmetric = PETSC_TRUE;
101   } else {
102     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNC__
113 #define __FUNC__ "MatMarkDiag_MPIAdj"
114 int MatMarkDiag_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
117   int        i,j, *diag, m = a->m;
118 
119   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
120   PLogObjectMemory(A,(m+1)*sizeof(int));
121   for ( i=0; i<a->m; i++ ) {
122     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
123       if (a->j[j] == i) {
124         diag[i] = j;
125         break;
126       }
127     }
128   }
129   a->diag = diag;
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNC__
134 #define __FUNC__ "MatGetSize_MPIAdj"
135 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
136 {
137   if (m) *m = A->M;
138   if (n) *n = A->N;
139   PetscFunctionReturn(0);
140 }
141 
142 #undef __FUNC__
143 #define __FUNC__ "MatGetSize_MPIAdj"
144 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
145 {
146   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
147   if (m) *m = a->m;
148   if (n) *n = A->N;
149   PetscFunctionReturn(0);
150 }
151 
152 #undef __FUNC__
153 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
154 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
155 {
156   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
157   *m = a->rstart; *n = a->rend;
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNC__
162 #define __FUNC__ "MatGetRow_MPIAdj"
163 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
164 {
165   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
166   int        *itmp;
167 
168   row -= a->rstart;
169 
170   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
171 
172   *nz = a->i[row+1] - a->i[row];
173   if (v) *v = PETSC_NULL;
174   if (idx) {
175     itmp = a->j + a->i[row];
176     if (*nz) {
177       *idx = itmp;
178     }
179     else *idx = 0;
180   }
181   PetscFunctionReturn(0);
182 }
183 
184 #undef __FUNC__
185 #define __FUNC__ "MatRestoreRow_MPIAdj"
186 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
187 {
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNC__
192 #define __FUNC__ "MatGetBlockSize_MPIAdj"
193 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
194 {
195   *bs = 1;
196   PetscFunctionReturn(0);
197 }
198 
199 
200 #undef __FUNC__
201 #define __FUNC__ "MatEqual_MPIAdj"
202 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
203 {
204   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
205  int         flag = 1,ierr;
206 
207   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
208 
209   /* If the  matrix dimensions are not equal, or no of nonzeros */
210   if ((a->m != b->m ) ||( a->nz != b->nz)) {
211     flag = 0;
212   }
213 
214   /* if the a->i are the same */
215   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
216     flag = 0;
217   }
218 
219   /* if a->j are the same */
220   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
221     flag = 0;
222   }
223 
224   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
225 
226 
227   PetscFunctionReturn(0);
228 }
229 
/* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  Only the handful of
   operations meaningful for a values-free adjacency structure are
   provided; the zero slots leave those operations unsupported.
   NOTE(review): slot positions must match struct _MatOps in the Mat
   headers -- confirm against the header if slots are ever reordered.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,          /* getrow */
       MatRestoreRow_MPIAdj,      /* restorerow */
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,           /* equal */
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,       /* setoption */
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,         /* getsize */
       MatGetLocalSize_MPIAdj,    /* getlocalsize */
       MatGetOwnershipRange_MPIAdj, /* getownershiprange */
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,    /* getblocksize */
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};         /* getmaps (generic implementation) */
298 
299 
300 #undef __FUNC__
301 #define __FUNC__ "MatCreateMPIAdj"
302 /*@C
303    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
304    The matrix does not have numerical values associated with it, but is
305    intended for ordering (to reduce bandwidth etc) and partitioning.
306 
307    Collective on MPI_Comm
308 
309    Input Parameters:
310 +  comm - MPI communicator, set to PETSC_COMM_SELF
311 .  m - number of local rows
312 .  n - number of columns
313 .  i - the indices into j for the start of each row
314 -  j - the column indices for each row (sorted for each row).
315        The indices in i and j start with zero (NOT with one).
316 
317    Output Parameter:
318 .  A - the matrix
319 
320    Level: intermediate
321 
322    Notes: This matrix object does not support most matrix operations, include
323    MatSetValues().
324    You must NOT free the ii and jj arrays yourself. PETSc will free them
325    when the matrix is destroyed.
326 
327    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
328 
329 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
330 @*/
331 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
332 {
333   Mat        B;
334   Mat_MPIAdj *b;
335   int        ii,ierr, flg,size,rank;
336 
337   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
338   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
339 
340   *A                  = 0;
341   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
342   PLogObjectCreate(B);
343   B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
344   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
345   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
346   B->ops->destroy     = MatDestroy_MPIAdj;
347   B->ops->view        = MatView_MPIAdj;
348   B->factor           = 0;
349   B->lupivotthreshold = 1.0;
350   B->mapping          = 0;
351   B->assembled        = PETSC_FALSE;
352 
353   b->m = m; B->m = m;
354   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
355   B->n = n; B->N = n;
356 
357   /* the information in the maps duplicates the information computed below, eventually
358      we should remove the duplicate information that is not contained in the maps */
359   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
360   /* we don't know the "local columns" so just use the row information :-( */
361   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
362 
363   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
364   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
365   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
366   b->rowners[0] = 0;
367   for ( ii=2; ii<=size; ii++ ) {
368     b->rowners[ii] += b->rowners[ii-1];
369   }
370   b->rstart = b->rowners[rank];
371   b->rend   = b->rowners[rank+1];
372 
373   b->j  = j;
374   b->i  = i;
375 
376   b->nz               = i[m];
377   b->diag             = 0;
378   b->symmetric        = PETSC_FALSE;
379 
380   *A = B;
381 
382   ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
383   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
384   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
385   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
386   PetscFunctionReturn(0);
387 }
388 
389 
390 
391