xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 76be9ce4a233aaa47cda2bc7f5a27cd7faabecaa)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.5 1997/12/01 01:55:17 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 
9 #include "pinclude/pviewer.h"
10 #include "sys.h"
11 #include "src/mat/impls/adj/mpi/mpiadj.h"
12 
13 
14 #undef __FUNC__
15 #define __FUNC__ "MatView_MPIAdj_ASCII"
16 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
17 {
18   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
19   int         ierr, i,j, m = a->m,  format;
20   FILE        *fd;
21   char        *outputname;
22   MPI_Comm    comm = A->comm;
23 
24   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
25   ierr = ViewerFileGetOutputname_Private(viewer,&outputname); CHKERRQ(ierr);
26   ierr = ViewerGetFormat(viewer,&format);
27   if (format == VIEWER_FORMAT_ASCII_INFO) {
28     PetscFunctionReturn(0);
29   } else {
30     for ( i=0; i<m; i++ ) {
31       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
32       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
33         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
34       }
35       PetscSynchronizedFPrintf(comm,fd,"\n");
36     }
37   }
38   PetscSynchronizedFlush(comm);
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNC__
43 #define __FUNC__ "MatView_MPIAdj"
44 int MatView_MPIAdj(PetscObject obj,Viewer viewer)
45 {
46   Mat         A = (Mat) obj;
47   ViewerType  vtype;
48   int         ierr;
49 
50   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
51   if (vtype == ASCII_FILE_VIEWER || vtype == ASCII_FILES_VIEWER){
52     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
53   }
54   PetscFunctionReturn(0);
55 }
56 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Frees all storage owned by the MPIAdj matrix,
    including the user-supplied i/j arrays (ownership transfers to the
    matrix at creation; see MatCreateMPIAdj notes), then destroys the header.
*/
int MatDestroy_MPIAdj(PetscObject obj)
{
  Mat        A  = (Mat) obj;
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;

#if defined(USE_PETSC_LOG)
  PLogObjectState(obj,"Rows=%d, Cols=%d, NZ=%d",A->m,A->n,a->nz);
#endif
  /* diag is only allocated by MatMarkDiag_MPIAdj, so it may still be 0 here */
  if (a->diag) PetscFree(a->diag);
  PetscFree(a->i);
  PetscFree(a->j);
  PetscFree(a->rowners);
  PetscFree(a);

  PLogObjectDestroy(A);
  PetscHeaderDestroy(A);
  PetscFunctionReturn(0);
}
77 
78 
79 #undef __FUNC__
80 #define __FUNC__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
84 
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ "MatMarkDiag_MPIAdj"
100 int MatMarkDiag_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
103   int        i,j, *diag, m = a->m;
104 
105   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
106   PLogObjectMemory(A,(m+1)*sizeof(int));
107   for ( i=0; i<a->m; i++ ) {
108     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
109       if (a->j[j] == i) {
110         diag[i] = j;
111         break;
112       }
113     }
114   }
115   a->diag = diag;
116   PetscFunctionReturn(0);
117 }
118 
119 #undef __FUNC__
120 #define __FUNC__ "MatGetSize_MPIAdj"
121 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
122 {
123   if (m) *m = A->M;
124   if (n) *n = A->N;
125   PetscFunctionReturn(0);
126 }
127 
128 #undef __FUNC__
129 #define __FUNC__ "MatGetSize_MPIAdj"
130 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
131 {
132   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
133   if (m) *m = a->m;
134   if (n) *n = A->N;
135   PetscFunctionReturn(0);
136 }
137 
138 #undef __FUNC__
139 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
140 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
141 {
142   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
143   *m = a->rstart; *n = a->rend;
144   PetscFunctionReturn(0);
145 }
146 
147 #undef __FUNC__
148 #define __FUNC__ "MatGetRow_MPIAdj"
149 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
150 {
151   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
152   int        *itmp;
153 
154   row -= a->rstart;
155 
156   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
157 
158   *nz = a->i[row+1] - a->i[row];
159   if (v) *v = PETSC_NULL;
160   if (idx) {
161     itmp = a->j + a->i[row];
162     if (*nz) {
163       *idx = itmp;
164     }
165     else *idx = 0;
166   }
167   PetscFunctionReturn(0);
168 }
169 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - No-op: MatGetRow_MPIAdj hands out pointers into
    the matrix's own storage, so there is nothing to release here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionReturn(0);
}
176 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - Adjacency matrices are unblocked, so the
    block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  *bs = 1;
  PetscFunctionReturn(0);
}
184 
185 
186 #undef __FUNC__
187 #define __FUNC__ "MatEqual_MPIAdj"
188 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
189 {
190   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
191  int         flag = 1,ierr;
192 
193   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
194 
195   /* If the  matrix dimensions are not equal, or no of nonzeros */
196   if ((a->m != b->m ) ||( a->nz != b->nz)) {
197     flag = 0;
198   }
199 
200   /* if the a->i are the same */
201   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
202     flag = 0;
203   }
204 
205   /* if a->j are the same */
206   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
207     flag = 0;
208   }
209 
210   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
211 
212 
213   PetscFunctionReturn(0);
214 }
215 
216 
217 /* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj type. The initializer is POSITIONAL: each
    slot corresponds to an entry of struct _MatOps (declared in matimpl.h),
    and a 0 means the operation is not supported by this matrix type. Only
    the structural query operations are implemented, since MPIAdj carries
    no numerical values.
*/
static struct _MatOps MatOps = {0,
       MatGetRow_MPIAdj,MatRestoreRow_MPIAdj,
       0,0,
       0,0,
       0,0,
       0,0,
       0,0,
       0,
       0,
       0,MatEqual_MPIAdj,
       0,0,0,
       0,0,
       0,
       MatSetOption_MPIAdj,0,0,
       0,0,0,0,
       MatGetSize_MPIAdj,MatGetLocalSize_MPIAdj,MatGetOwnershipRange_MPIAdj,
       0,0,
       0,0,
       0,0,0,
       0,0,0,
       0,0,
       0,0,
       0,
       0,0,0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0};
252 
253 
254 #undef __FUNC__
255 #define __FUNC__ "MatCreateMPIAdj"
256 /*@C
257    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
258      The matrix does not have numerical values associated with it, but is
259      intended for ordering (to reduce bandwidth etc) and partitioning.
260 
261    Input Parameters:
262 .  comm - MPI communicator, set to PETSC_COMM_SELF
263 .  m - number of local rows
264 .  n - number of columns
265 .  i - the indices into j for the start of each row
266 .  j - the column indices for each row (sorted for each row)
267        The indices in i and j start with zero NOT one.
268 
269    Output Parameter:
270 .  A - the matrix
271 
272    Notes: You must NOT free the ii and jj arrays yourself. PETSc will free them
273    when the matrix is destroyed.
274 
275 .  MatSetOptions() possible values - MAT_STRUCTURALLY_SYMMETRIC
276 
277 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
278 @*/
279 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
280 {
281   Mat        B;
282   Mat_MPIAdj *b;
283   int        ii,ierr, flg,size,rank;
284 
285   MPI_Comm_size(comm,&size);
286   MPI_Comm_rank(comm,&rank);
287 
288   *A                  = 0;
289   PetscHeaderCreate(B,_p_Mat,MAT_COOKIE,MATMPIADJ,comm,MatDestroy,MatView);
290   PLogObjectCreate(B);
291   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
292   PetscMemzero(b,sizeof(Mat_MPIAdj));
293   PetscMemcpy(&B->ops,&MatOps,sizeof(struct _MatOps));
294   B->destroy          = MatDestroy_MPIAdj;
295   B->view             = MatView_MPIAdj;
296   B->factor           = 0;
297   B->lupivotthreshold = 1.0;
298   B->mapping          = 0;
299   B->assembled        = PETSC_FALSE;
300 
301   b->m = m; B->m = m;
302   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
303   B->n = n; B->N = n;
304 
305   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
306   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
307   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
308   b->rowners[0] = 0;
309   for ( ii=2; ii<=size; ii++ ) {
310     b->rowners[ii] += b->rowners[ii-1];
311   }
312   b->rstart = b->rowners[rank];
313   b->rend   = b->rowners[rank+1];
314 
315   b->j  = j;
316   b->i  = i;
317 
318   b->nz               = i[m];
319   b->diag             = 0;
320   b->symmetric        = PETSC_FALSE;
321 
322   *A = B;
323 
324   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
325   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
326   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
327   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
328   PetscFunctionReturn(0);
329 }
330 
331 
332 
333