xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision e1311b9049e89cb3452dcd306fde571f4b440ff2)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.8 1998/03/12 23:19:41 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 
9 #include "pinclude/pviewer.h"
10 #include "sys.h"
11 #include "src/mat/impls/adj/mpi/mpiadj.h"
12 
13 
14 #undef __FUNC__
15 #define __FUNC__ "MatView_MPIAdj_ASCII"
16 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
17 {
18   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
19   int         ierr, i,j, m = a->m,  format;
20   FILE        *fd;
21   char        *outputname;
22   MPI_Comm    comm = A->comm;
23 
24   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
25   ierr = ViewerFileGetOutputname_Private(viewer,&outputname); CHKERRQ(ierr);
26   ierr = ViewerGetFormat(viewer,&format);
27   if (format == VIEWER_FORMAT_ASCII_INFO) {
28     PetscFunctionReturn(0);
29   } else {
30     for ( i=0; i<m; i++ ) {
31       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
32       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
33         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
34       }
35       PetscSynchronizedFPrintf(comm,fd,"\n");
36     }
37   }
38   PetscSynchronizedFlush(comm);
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNC__
43 #define __FUNC__ "MatView_MPIAdj"
44 int MatView_MPIAdj(Mat A,Viewer viewer)
45 {
46   ViewerType  vtype;
47   int         ierr;
48 
49   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
50   if (vtype == ASCII_FILE_VIEWER || vtype == ASCII_FILES_VIEWER){
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNC__
57 #define __FUNC__ "MatDestroy_MPIAdj"
58 int MatDestroy_MPIAdj(Mat A)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
61 
62 #if defined(USE_PETSC_LOG)
63   PLogObjectState((PetscObject)A,"Rows=%d, Cols=%d, NZ=%d",A->m,A->n,a->nz);
64 #endif
65   if (a->diag) PetscFree(a->diag);
66   PetscFree(a->i);
67   PetscFree(a->j);
68   PetscFree(a->rowners);
69   PetscFree(a);
70 
71   PLogObjectDestroy(A);
72   PetscHeaderDestroy(A);
73   PetscFunctionReturn(0);
74 }
75 
76 
77 #undef __FUNC__
78 #define __FUNC__ "MatSetOption_MPIAdj"
79 int MatSetOption_MPIAdj(Mat A,MatOption op)
80 {
81   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
82 
83   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
84     a->symmetric = PETSC_TRUE;
85   } else {
86     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
87   }
88   PetscFunctionReturn(0);
89 }
90 
91 
92 /*
93      Adds diagonal pointers to sparse matrix structure.
94 */
95 
96 #undef __FUNC__
97 #define __FUNC__ "MatMarkDiag_MPIAdj"
98 int MatMarkDiag_MPIAdj(Mat A)
99 {
100   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
101   int        i,j, *diag, m = a->m;
102 
103   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
104   PLogObjectMemory(A,(m+1)*sizeof(int));
105   for ( i=0; i<a->m; i++ ) {
106     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
107       if (a->j[j] == i) {
108         diag[i] = j;
109         break;
110       }
111     }
112   }
113   a->diag = diag;
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNC__
118 #define __FUNC__ "MatGetSize_MPIAdj"
119 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
120 {
121   if (m) *m = A->M;
122   if (n) *n = A->N;
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNC__
127 #define __FUNC__ "MatGetSize_MPIAdj"
128 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
129 {
130   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
131   if (m) *m = a->m;
132   if (n) *n = A->N;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNC__
137 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
138 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
141   *m = a->rstart; *n = a->rend;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNC__
146 #define __FUNC__ "MatGetRow_MPIAdj"
147 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
150   int        *itmp;
151 
152   row -= a->rstart;
153 
154   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
155 
156   *nz = a->i[row+1] - a->i[row];
157   if (v) *v = PETSC_NULL;
158   if (idx) {
159     itmp = a->j + a->i[row];
160     if (*nz) {
161       *idx = itmp;
162     }
163     else *idx = 0;
164   }
165   PetscFunctionReturn(0);
166 }
167 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
   Companion to MatGetRow_MPIAdj.  Intentionally a no-op: MatGetRow
   hands out pointers into the matrix's own storage, so there is
   nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionReturn(0);
}
174 
175 #undef __FUNC__
176 #define __FUNC__ "MatGetBlockSize_MPIAdj"
177 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
178 {
179   *bs = 1;
180   PetscFunctionReturn(0);
181 }
182 
183 
184 #undef __FUNC__
185 #define __FUNC__ "MatEqual_MPIAdj"
186 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
187 {
188   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
189  int         flag = 1,ierr;
190 
191   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
192 
193   /* If the  matrix dimensions are not equal, or no of nonzeros */
194   if ((a->m != b->m ) ||( a->nz != b->nz)) {
195     flag = 0;
196   }
197 
198   /* if the a->i are the same */
199   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
200     flag = 0;
201   }
202 
203   /* if a->j are the same */
204   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
205     flag = 0;
206   }
207 
208   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
209 
210 
211   PetscFunctionReturn(0);
212 }
213 
214 
215 /* -------------------------------------------------------------------*/
216 static struct _MatOps MatOps = {0,
217        MatGetRow_MPIAdj,MatRestoreRow_MPIAdj,
218        0,0,
219        0,0,
220        0,0,
221        0,0,
222        0,0,
223        0,
224        0,
225        0,MatEqual_MPIAdj,
226        0,0,0,
227        0,0,
228        0,
229        MatSetOption_MPIAdj,0,0,
230        0,0,0,0,
231        MatGetSize_MPIAdj,MatGetLocalSize_MPIAdj,MatGetOwnershipRange_MPIAdj,
232        0,0,
233        0,0,
234        0,0,0,
235        0,0,0,
236        0,0,
237        0,0,
238        0,
239        0,0,0,
240        0,
241        MatGetBlockSize_MPIAdj,
242        0,
243        0,
244        0,
245        0,
246        0,
247        0,
248        0,
249        0};
250 
251 
252 #undef __FUNC__
253 #define __FUNC__ "MatCreateMPIAdj"
254 /*@C
255    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
256      The matrix does not have numerical values associated with it, but is
257      intended for ordering (to reduce bandwidth etc) and partitioning.
258 
259    Input Parameters:
260 .  comm - MPI communicator, set to PETSC_COMM_SELF
261 .  m - number of local rows
262 .  n - number of columns
263 .  i - the indices into j for the start of each row
264 .  j - the column indices for each row (sorted for each row)
265        The indices in i and j start with zero NOT one.
266 
267    Output Parameter:
268 .  A - the matrix
269 
270    Notes: You must NOT free the ii and jj arrays yourself. PETSc will free them
271    when the matrix is destroyed.
272 
273 .  MatSetOption() possible values - MAT_STRUCTURALLY_SYMMETRIC
274 
275 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
276 @*/
277 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
278 {
279   Mat        B;
280   Mat_MPIAdj *b;
281   int        ii,ierr, flg,size,rank;
282 
283   MPI_Comm_size(comm,&size);
284   MPI_Comm_rank(comm,&rank);
285 
286   *A                  = 0;
287   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,comm,MatDestroy,MatView);
288   PLogObjectCreate(B);
289   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
290   PetscMemzero(b,sizeof(Mat_MPIAdj));
291   PetscMemcpy(B->ops,&MatOps,sizeof(struct _MatOps));
292   B->ops->destroy          = MatDestroy_MPIAdj;
293   B->ops->view             = MatView_MPIAdj;
294   B->factor           = 0;
295   B->lupivotthreshold = 1.0;
296   B->mapping          = 0;
297   B->assembled        = PETSC_FALSE;
298 
299   b->m = m; B->m = m;
300   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
301   B->n = n; B->N = n;
302 
303   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
304   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
305   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
306   b->rowners[0] = 0;
307   for ( ii=2; ii<=size; ii++ ) {
308     b->rowners[ii] += b->rowners[ii-1];
309   }
310   b->rstart = b->rowners[rank];
311   b->rend   = b->rowners[rank+1];
312 
313   b->j  = j;
314   b->i  = i;
315 
316   b->nz               = i[m];
317   b->diag             = 0;
318   b->symmetric        = PETSC_FALSE;
319 
320   *A = B;
321 
322   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
323   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
324   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
325   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
326   PetscFunctionReturn(0);
327 }
328 
329 
330 
331