xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision b9cd556b8ad4e1378e9512be9a773e222ddf629a)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.11 1998/04/13 17:41:17 bsmith Exp curfman $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 
9 #include "pinclude/pviewer.h"
10 #include "sys.h"
11 #include "src/mat/impls/adj/mpi/mpiadj.h"
12 
13 
14 #undef __FUNC__
15 #define __FUNC__ "MatView_MPIAdj_ASCII"
16 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
17 {
18   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
19   int         ierr, i,j, m = a->m,  format;
20   FILE        *fd;
21   char        *outputname;
22   MPI_Comm    comm = A->comm;
23 
24   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
25   ierr = ViewerFileGetOutputname_Private(viewer,&outputname); CHKERRQ(ierr);
26   ierr = ViewerGetFormat(viewer,&format);
27   if (format == VIEWER_FORMAT_ASCII_INFO) {
28     PetscFunctionReturn(0);
29   } else {
30     for ( i=0; i<m; i++ ) {
31       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
32       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
33         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
34       }
35       PetscSynchronizedFPrintf(comm,fd,"\n");
36     }
37   }
38   PetscSynchronizedFlush(comm);
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNC__
43 #define __FUNC__ "MatView_MPIAdj"
44 int MatView_MPIAdj(Mat A,Viewer viewer)
45 {
46   ViewerType  vtype;
47   int         ierr;
48 
49   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
50   if (vtype == ASCII_FILE_VIEWER || vtype == ASCII_FILES_VIEWER){
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ(1,1,"Viewer type not supported by PETSc object");
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 #undef __FUNC__
59 #define __FUNC__ "MatDestroy_MPIAdj"
60 int MatDestroy_MPIAdj(Mat A)
61 {
62   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
63 
64 #if defined(USE_PETSC_LOG)
65   PLogObjectState((PetscObject)A,"Rows=%d, Cols=%d, NZ=%d",A->m,A->n,a->nz);
66 #endif
67   if (a->diag) PetscFree(a->diag);
68   PetscFree(a->i);
69   PetscFree(a->j);
70   PetscFree(a->rowners);
71   PetscFree(a);
72 
73   PLogObjectDestroy(A);
74   PetscHeaderDestroy(A);
75   PetscFunctionReturn(0);
76 }
77 
78 
79 #undef __FUNC__
80 #define __FUNC__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
84 
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ "MatMarkDiag_MPIAdj"
100 int MatMarkDiag_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
103   int        i,j, *diag, m = a->m;
104 
105   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
106   PLogObjectMemory(A,(m+1)*sizeof(int));
107   for ( i=0; i<a->m; i++ ) {
108     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
109       if (a->j[j] == i) {
110         diag[i] = j;
111         break;
112       }
113     }
114   }
115   a->diag = diag;
116   PetscFunctionReturn(0);
117 }
118 
119 #undef __FUNC__
120 #define __FUNC__ "MatGetSize_MPIAdj"
121 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
122 {
123   if (m) *m = A->M;
124   if (n) *n = A->N;
125   PetscFunctionReturn(0);
126 }
127 
128 #undef __FUNC__
129 #define __FUNC__ "MatGetSize_MPIAdj"
130 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
131 {
132   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
133   if (m) *m = a->m;
134   if (n) *n = A->N;
135   PetscFunctionReturn(0);
136 }
137 
138 #undef __FUNC__
139 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
140 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
141 {
142   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
143   *m = a->rstart; *n = a->rend;
144   PetscFunctionReturn(0);
145 }
146 
147 #undef __FUNC__
148 #define __FUNC__ "MatGetRow_MPIAdj"
149 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
150 {
151   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
152   int        *itmp;
153 
154   row -= a->rstart;
155 
156   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
157 
158   *nz = a->i[row+1] - a->i[row];
159   if (v) *v = PETSC_NULL;
160   if (idx) {
161     itmp = a->j + a->i[row];
162     if (*nz) {
163       *idx = itmp;
164     }
165     else *idx = 0;
166   }
167   PetscFunctionReturn(0);
168 }
169 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Intentionally a no-op: MatGetRow_MPIAdj hands out
   pointers directly into the matrix's own i/j arrays, so there is nothing
   to copy back or free here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionReturn(0);
}
176 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices carry no block structure,
   so the block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  *bs = 1;
  PetscFunctionReturn(0);
}
184 
185 
186 #undef __FUNC__
187 #define __FUNC__ "MatEqual_MPIAdj"
188 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
189 {
190   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
191  int         flag = 1,ierr;
192 
193   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
194 
195   /* If the  matrix dimensions are not equal, or no of nonzeros */
196   if ((a->m != b->m ) ||( a->nz != b->nz)) {
197     flag = 0;
198   }
199 
200   /* if the a->i are the same */
201   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
202     flag = 0;
203   }
204 
205   /* if a->j are the same */
206   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
207     flag = 0;
208   }
209 
210   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
211 
212 
213   PetscFunctionReturn(0);
214 }
215 
216 
217 /* -------------------------------------------------------------------*/
218 static struct _MatOps MatOps = {0,
219        MatGetRow_MPIAdj,MatRestoreRow_MPIAdj,
220        0,0,
221        0,0,
222        0,0,
223        0,0,
224        0,0,
225        0,
226        0,
227        0,MatEqual_MPIAdj,
228        0,0,0,
229        0,0,
230        0,
231        MatSetOption_MPIAdj,0,0,
232        0,0,0,0,
233        MatGetSize_MPIAdj,MatGetLocalSize_MPIAdj,MatGetOwnershipRange_MPIAdj,
234        0,0,
235        0,0,
236        0,0,0,
237        0,0,0,
238        0,0,
239        0,0,
240        0,
241        0,0,0,
242        0,
243        MatGetBlockSize_MPIAdj,
244        0,
245        0,
246        0,
247        0,
248        0,
249        0,
250        0,
251        0};
252 
253 
254 #undef __FUNC__
255 #define __FUNC__ "MatCreateMPIAdj"
256 /*@C
257    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
258    The matrix does not have numerical values associated with it, but is
259    intended for ordering (to reduce bandwidth etc) and partitioning.
260 
261    Collective on MPI_Comm
262 
263    Input Parameters:
264 +  comm - MPI communicator, set to PETSC_COMM_SELF
265 .  m - number of local rows
266 .  n - number of columns
267 .  i - the indices into j for the start of each row
268 -  j - the column indices for each row (sorted for each row).
269        The indices in i and j start with zero (NOT with one).
270 
271    Output Parameter:
272 .  A - the matrix
273 
274    Notes: You must NOT free the ii and jj arrays yourself. PETSc will free them
275    when the matrix is destroyed.
276 
277    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
278 
279 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
280 @*/
281 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
282 {
283   Mat        B;
284   Mat_MPIAdj *b;
285   int        ii,ierr, flg,size,rank;
286 
287   MPI_Comm_size(comm,&size);
288   MPI_Comm_rank(comm,&rank);
289 
290   *A                  = 0;
291   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,comm,MatDestroy,MatView);
292   PLogObjectCreate(B);
293   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
294   PetscMemzero(b,sizeof(Mat_MPIAdj));
295   PetscMemcpy(B->ops,&MatOps,sizeof(struct _MatOps));
296   B->ops->destroy          = MatDestroy_MPIAdj;
297   B->ops->view             = MatView_MPIAdj;
298   B->factor           = 0;
299   B->lupivotthreshold = 1.0;
300   B->mapping          = 0;
301   B->assembled        = PETSC_FALSE;
302 
303   b->m = m; B->m = m;
304   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
305   B->n = n; B->N = n;
306 
307   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
308   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
309   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
310   b->rowners[0] = 0;
311   for ( ii=2; ii<=size; ii++ ) {
312     b->rowners[ii] += b->rowners[ii-1];
313   }
314   b->rstart = b->rowners[rank];
315   b->rend   = b->rowners[rank+1];
316 
317   b->j  = j;
318   b->i  = i;
319 
320   b->nz               = i[m];
321   b->diag             = 0;
322   b->symmetric        = PETSC_FALSE;
323 
324   *A = B;
325 
326   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
327   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
328   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
329   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
330   PetscFunctionReturn(0);
331 }
332 
333 
334 
335