xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 3f1db9ec2fd39765c6c3a00831044586630c4cca)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.18 1998/12/03 04:01:42 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   FILE        *fd;
18   char        *outputname;
19   MPI_Comm    comm = A->comm;
20 
21   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
22   ierr = ViewerGetOutputname(viewer,&outputname); CHKERRQ(ierr);
23   ierr = ViewerGetFormat(viewer,&format);
24   if (format == VIEWER_FORMAT_ASCII_INFO) {
25     PetscFunctionReturn(0);
26   } else {
27     for ( i=0; i<m; i++ ) {
28       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
29       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
30         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
31       }
32       PetscSynchronizedFPrintf(comm,fd,"\n");
33     }
34   }
35   PetscSynchronizedFlush(comm);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ "MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   ViewerType  vtype;
44   int         ierr;
45 
46   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
47   if (PetscTypeCompare(vtype,ASCII_VIEWER)){
48     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
49   } else {
50     SETERRQ(1,1,"Viewer type not supported by PETSc object");
51   }
52   PetscFunctionReturn(0);
53 }
54 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Releases all storage owned by an MPIAdj matrix.

    Note: the i and j arrays were handed over by the user in
    MatCreateMPIAdj() and are freed here (ownership transferred at creation).
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;
  /* reference counted: only the final release actually destroys the object */
  if (--mat->refct > 0) PetscFunctionReturn(0);

  /* optional local-to-global mappings attached to the Mat header */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

  /* log the final state before the data it reports on is freed */
#if defined(USE_PETSC_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) PetscFree(a->diag);
  PetscFree(a->i);
  PetscFree(a->j);
  PetscFree(a->rowners);
  PetscFree(a);

  /* header teardown must come last: it invalidates mat itself */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
91 
92 
93 #undef __FUNC__
94 #define __FUNC__ "MatSetOption_MPIAdj"
95 int MatSetOption_MPIAdj(Mat A,MatOption op)
96 {
97   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
98 
99   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
100     a->symmetric = PETSC_TRUE;
101   } else {
102     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
103   }
104   PetscFunctionReturn(0);
105 }
106 
107 
108 /*
109      Adds diagonal pointers to sparse matrix structure.
110 */
111 
112 #undef __FUNC__
113 #define __FUNC__ "MatMarkDiag_MPIAdj"
114 int MatMarkDiag_MPIAdj(Mat A)
115 {
116   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
117   int        i,j, *diag, m = a->m;
118 
119   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
120   PLogObjectMemory(A,(m+1)*sizeof(int));
121   for ( i=0; i<a->m; i++ ) {
122     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
123       if (a->j[j] == i) {
124         diag[i] = j;
125         break;
126       }
127     }
128   }
129   a->diag = diag;
130   PetscFunctionReturn(0);
131 }
132 
133 #undef __FUNC__
134 #define __FUNC__ "MatGetSize_MPIAdj"
135 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
136 {
137   if (m) *m = A->M;
138   if (n) *n = A->N;
139   PetscFunctionReturn(0);
140 }
141 
142 #undef __FUNC__
143 #define __FUNC__ "MatGetSize_MPIAdj"
144 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
145 {
146   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
147   if (m) *m = a->m;
148   if (n) *n = A->N;
149   PetscFunctionReturn(0);
150 }
151 
152 #undef __FUNC__
153 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
154 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
155 {
156   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
157   *m = a->rstart; *n = a->rend;
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNC__
162 #define __FUNC__ "MatGetRow_MPIAdj"
163 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
164 {
165   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
166   int        *itmp;
167 
168   row -= a->rstart;
169 
170   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
171 
172   *nz = a->i[row+1] - a->i[row];
173   if (v) *v = PETSC_NULL;
174   if (idx) {
175     itmp = a->j + a->i[row];
176     if (*nz) {
177       *idx = itmp;
178     }
179     else *idx = 0;
180   }
181   PetscFunctionReturn(0);
182 }
183 
184 #undef __FUNC__
185 #define __FUNC__ "MatRestoreRow_MPIAdj"
186 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
187 {
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNC__
192 #define __FUNC__ "MatGetBlockSize_MPIAdj"
193 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
194 {
195   *bs = 1;
196   PetscFunctionReturn(0);
197 }
198 
199 
200 #undef __FUNC__
201 #define __FUNC__ "MatEqual_MPIAdj"
202 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
203 {
204   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
205  int         flag = 1,ierr;
206 
207   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
208 
209   /* If the  matrix dimensions are not equal, or no of nonzeros */
210   if ((a->m != b->m ) ||( a->nz != b->nz)) {
211     flag = 0;
212   }
213 
214   /* if the a->i are the same */
215   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
216     flag = 0;
217   }
218 
219   /* if a->j are the same */
220   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
221     flag = 0;
222   }
223 
224   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
225 
226 
227   PetscFunctionReturn(0);
228 }
229 
230 
/* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  The initializer is
    POSITIONAL: each entry corresponds to a fixed slot in struct _MatOps,
    and a 0 marks an operation this type does not support (notably there is
    no MatSetValues slot -- adjacency matrices carry no numerical values).
    Do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
298 
299 
300 #undef __FUNC__
301 #define __FUNC__ "MatCreateMPIAdj"
302 /*@C
303    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
304    The matrix does not have numerical values associated with it, but is
305    intended for ordering (to reduce bandwidth etc) and partitioning.
306 
307    Collective on MPI_Comm
308 
309    Input Parameters:
310 +  comm - MPI communicator, set to PETSC_COMM_SELF
311 .  m - number of local rows
312 .  n - number of columns
313 .  i - the indices into j for the start of each row
314 -  j - the column indices for each row (sorted for each row).
315        The indices in i and j start with zero (NOT with one).
316 
317    Output Parameter:
318 .  A - the matrix
319 
320    Notes: This matrix object does not support most matrix operations, include
321    MatSetValues().
322    You must NOT free the ii and jj arrays yourself. PETSc will free them
323    when the matrix is destroyed.
324 
325    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
326 
327 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
328 @*/
329 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
330 {
331   Mat        B;
332   Mat_MPIAdj *b;
333   int        ii,ierr, flg,size,rank;
334 
335   MPI_Comm_size(comm,&size);
336   MPI_Comm_rank(comm,&rank);
337 
338   *A                  = 0;
339   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
340   PLogObjectCreate(B);
341   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
342   PetscMemzero(b,sizeof(Mat_MPIAdj));
343   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
344   B->ops->destroy          = MatDestroy_MPIAdj;
345   B->ops->view             = MatView_MPIAdj;
346   B->factor           = 0;
347   B->lupivotthreshold = 1.0;
348   B->mapping          = 0;
349   B->assembled        = PETSC_FALSE;
350 
351   b->m = m; B->m = m;
352   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
353   B->n = n; B->N = n;
354 
355   /* the information in the maps duplicates the information computed below, eventually
356      we should remove the duplicate information that is not contained in the maps */
357   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
358   /* we don't know the "local columns" so just use the row information :-( */
359   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
360 
361   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
362   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
363   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
364   b->rowners[0] = 0;
365   for ( ii=2; ii<=size; ii++ ) {
366     b->rowners[ii] += b->rowners[ii-1];
367   }
368   b->rstart = b->rowners[rank];
369   b->rend   = b->rowners[rank+1];
370 
371   b->j  = j;
372   b->i  = i;
373 
374   b->nz               = i[m];
375   b->diag             = 0;
376   b->symmetric        = PETSC_FALSE;
377 
378   *A = B;
379 
380   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
381   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
382   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
383   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 
387 
388 
389