xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision ee8cd831935f06b74797fa2bbde864072d1d7fee)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.13 1998/05/29 20:37:39 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 
9 #include "pinclude/pviewer.h"
10 #include "sys.h"
11 #include "src/mat/impls/adj/mpi/mpiadj.h"
12 
13 
14 #undef __FUNC__
15 #define __FUNC__ "MatView_MPIAdj_ASCII"
16 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
17 {
18   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
19   int         ierr, i,j, m = a->m,  format;
20   FILE        *fd;
21   char        *outputname;
22   MPI_Comm    comm = A->comm;
23 
24   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
25   ierr = ViewerFileGetOutputname_Private(viewer,&outputname); CHKERRQ(ierr);
26   ierr = ViewerGetFormat(viewer,&format);
27   if (format == VIEWER_FORMAT_ASCII_INFO) {
28     PetscFunctionReturn(0);
29   } else {
30     for ( i=0; i<m; i++ ) {
31       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
32       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
33         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
34       }
35       PetscSynchronizedFPrintf(comm,fd,"\n");
36     }
37   }
38   PetscSynchronizedFlush(comm);
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNC__
43 #define __FUNC__ "MatView_MPIAdj"
44 int MatView_MPIAdj(Mat A,Viewer viewer)
45 {
46   ViewerType  vtype;
47   int         ierr;
48 
49   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
50   if (vtype == ASCII_FILE_VIEWER || vtype == ASCII_FILES_VIEWER){
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ(1,1,"Viewer type not supported by PETSc object");
54   }
55   PetscFunctionReturn(0);
56 }
57 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Frees all storage associated with an MPIAdj matrix.
    Actual teardown happens only when the reference count reaches zero.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;
  /* another reference still holds the matrix; just decrement and return */
  if (--mat->refct > 0) PetscFunctionReturn(0);

  /* release local-to-global mappings, if they were ever attached */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
  }

#if defined(USE_PETSC_LOG)
  /* log final sizes before the data is freed below */
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* diag is created lazily by MatMarkDiag_MPIAdj, so it may still be 0 */
  if (a->diag) PetscFree(a->diag);
  /* i and j were handed over by the caller of MatCreateMPIAdj and are
     owned (and therefore freed) by the matrix */
  PetscFree(a->i);
  PetscFree(a->j);
  PetscFree(a->rowners);
  PetscFree(a);

  /* header destruction must come last -- it invalidates mat itself */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
88 
89 
90 #undef __FUNC__
91 #define __FUNC__ "MatSetOption_MPIAdj"
92 int MatSetOption_MPIAdj(Mat A,MatOption op)
93 {
94   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
95 
96   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
97     a->symmetric = PETSC_TRUE;
98   } else {
99     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
100   }
101   PetscFunctionReturn(0);
102 }
103 
104 
105 /*
106      Adds diagonal pointers to sparse matrix structure.
107 */
108 
109 #undef __FUNC__
110 #define __FUNC__ "MatMarkDiag_MPIAdj"
111 int MatMarkDiag_MPIAdj(Mat A)
112 {
113   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
114   int        i,j, *diag, m = a->m;
115 
116   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
117   PLogObjectMemory(A,(m+1)*sizeof(int));
118   for ( i=0; i<a->m; i++ ) {
119     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
120       if (a->j[j] == i) {
121         diag[i] = j;
122         break;
123       }
124     }
125   }
126   a->diag = diag;
127   PetscFunctionReturn(0);
128 }
129 
130 #undef __FUNC__
131 #define __FUNC__ "MatGetSize_MPIAdj"
132 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
133 {
134   if (m) *m = A->M;
135   if (n) *n = A->N;
136   PetscFunctionReturn(0);
137 }
138 
139 #undef __FUNC__
140 #define __FUNC__ "MatGetSize_MPIAdj"
141 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
142 {
143   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
144   if (m) *m = a->m;
145   if (n) *n = A->N;
146   PetscFunctionReturn(0);
147 }
148 
149 #undef __FUNC__
150 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
151 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
152 {
153   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
154   *m = a->rstart; *n = a->rend;
155   PetscFunctionReturn(0);
156 }
157 
158 #undef __FUNC__
159 #define __FUNC__ "MatGetRow_MPIAdj"
160 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
161 {
162   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
163   int        *itmp;
164 
165   row -= a->rstart;
166 
167   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
168 
169   *nz = a->i[row+1] - a->i[row];
170   if (v) *v = PETSC_NULL;
171   if (idx) {
172     itmp = a->j + a->i[row];
173     if (*nz) {
174       *idx = itmp;
175     }
176     else *idx = 0;
177   }
178   PetscFunctionReturn(0);
179 }
180 
181 #undef __FUNC__
182 #define __FUNC__ "MatRestoreRow_MPIAdj"
183 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
184 {
185   PetscFunctionReturn(0);
186 }
187 
188 #undef __FUNC__
189 #define __FUNC__ "MatGetBlockSize_MPIAdj"
190 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
191 {
192   *bs = 1;
193   PetscFunctionReturn(0);
194 }
195 
196 
197 #undef __FUNC__
198 #define __FUNC__ "MatEqual_MPIAdj"
199 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
200 {
201   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
202  int         flag = 1,ierr;
203 
204   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
205 
206   /* If the  matrix dimensions are not equal, or no of nonzeros */
207   if ((a->m != b->m ) ||( a->nz != b->nz)) {
208     flag = 0;
209   }
210 
211   /* if the a->i are the same */
212   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
213     flag = 0;
214   }
215 
216   /* if a->j are the same */
217   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
218     flag = 0;
219   }
220 
221   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
222 
223 
224   PetscFunctionReturn(0);
225 }
226 
227 
228 /* -------------------------------------------------------------------*/
/*
   Operation table for the MPIAdj matrix type.  NOTE(review): the entries
   are POSITIONAL and must line up with the member order of struct _MatOps
   (declared in the project headers) -- confirm against that header before
   inserting or removing slots.  A 0 entry means the operation is not
   supported by this format; only the named entries below are implemented.
*/
static struct _MatOps MatOps = {0,
       MatGetRow_MPIAdj,MatRestoreRow_MPIAdj,
       0,0,
       0,0,
       0,0,
       0,0,
       0,0,
       0,
       0,
       0,MatEqual_MPIAdj,
       0,0,0,
       0,0,
       0,
       MatSetOption_MPIAdj,0,0,
       0,0,0,0,
       MatGetSize_MPIAdj,MatGetLocalSize_MPIAdj,MatGetOwnershipRange_MPIAdj,
       0,0,
       0,0,
       0,0,0,
       0,0,0,
       0,0,
       0,0,
       0,
       0,0,0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0};
263 
264 
265 #undef __FUNC__
266 #define __FUNC__ "MatCreateMPIAdj"
267 /*@C
268    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
269    The matrix does not have numerical values associated with it, but is
270    intended for ordering (to reduce bandwidth etc) and partitioning.
271 
272    Collective on MPI_Comm
273 
274    Input Parameters:
275 +  comm - MPI communicator, set to PETSC_COMM_SELF
276 .  m - number of local rows
277 .  n - number of columns
278 .  i - the indices into j for the start of each row
279 -  j - the column indices for each row (sorted for each row).
280        The indices in i and j start with zero (NOT with one).
281 
282    Output Parameter:
283 .  A - the matrix
284 
285    Notes: This matrix object does not support most matrix operations, include
286    MatSetValues().
287    You must NOT free the ii and jj arrays yourself. PETSc will free them
288    when the matrix is destroyed.
289 
290    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
291 
292 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
293 @*/
294 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
295 {
296   Mat        B;
297   Mat_MPIAdj *b;
298   int        ii,ierr, flg,size,rank;
299 
300   MPI_Comm_size(comm,&size);
301   MPI_Comm_rank(comm,&rank);
302 
303   *A                  = 0;
304   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,comm,MatDestroy,MatView);
305   PLogObjectCreate(B);
306   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
307   PetscMemzero(b,sizeof(Mat_MPIAdj));
308   PetscMemcpy(B->ops,&MatOps,sizeof(struct _MatOps));
309   B->ops->destroy          = MatDestroy_MPIAdj;
310   B->ops->view             = MatView_MPIAdj;
311   B->factor           = 0;
312   B->lupivotthreshold = 1.0;
313   B->mapping          = 0;
314   B->assembled        = PETSC_FALSE;
315 
316   b->m = m; B->m = m;
317   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
318   B->n = n; B->N = n;
319 
320   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
321   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
322   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
323   b->rowners[0] = 0;
324   for ( ii=2; ii<=size; ii++ ) {
325     b->rowners[ii] += b->rowners[ii-1];
326   }
327   b->rstart = b->rowners[rank];
328   b->rend   = b->rowners[rank+1];
329 
330   b->j  = j;
331   b->i  = i;
332 
333   b->nz               = i[m];
334   b->diag             = 0;
335   b->symmetric        = PETSC_FALSE;
336 
337   *A = B;
338 
339   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
340   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
341   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
342   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
343   PetscFunctionReturn(0);
344 }
345 
346 
347 
348