xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 488ecbaffa467bb032d31d7eb20bc6d0ef6d986c)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.16 1998/07/14 03:16:14 bsmith Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 
9 #include "pinclude/pviewer.h"
10 #include "sys.h"
11 #include "src/mat/impls/adj/mpi/mpiadj.h"
12 
13 
14 #undef __FUNC__
15 #define __FUNC__ "MatView_MPIAdj_ASCII"
16 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
17 {
18   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
19   int         ierr, i,j, m = a->m,  format;
20   FILE        *fd;
21   char        *outputname;
22   MPI_Comm    comm = A->comm;
23 
24   ierr = ViewerASCIIGetPointer(viewer,&fd); CHKERRQ(ierr);
25   ierr = ViewerFileGetOutputname_Private(viewer,&outputname); CHKERRQ(ierr);
26   ierr = ViewerGetFormat(viewer,&format);
27   if (format == VIEWER_FORMAT_ASCII_INFO) {
28     PetscFunctionReturn(0);
29   } else {
30     for ( i=0; i<m; i++ ) {
31       PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);
32       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
33         PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);
34       }
35       PetscSynchronizedFPrintf(comm,fd,"\n");
36     }
37   }
38   PetscSynchronizedFlush(comm);
39   PetscFunctionReturn(0);
40 }
41 
42 #undef __FUNC__
43 #define __FUNC__ "MatView_MPIAdj"
44 int MatView_MPIAdj(Mat A,Viewer viewer)
45 {
46   ViewerType  vtype;
47   int         ierr;
48 
49   ierr = ViewerGetType(viewer,&vtype); CHKERRQ(ierr);
50   if (vtype == ASCII_FILE_VIEWER || vtype == ASCII_FILES_VIEWER){
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   } else {
53     SETERRQ(1,1,"Viewer type not supported by PETSc object");
54   }
55   PetscFunctionReturn(0);
56 }
57 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Releases all storage for an MPIAdj matrix.
    Note: the i and j arrays supplied to MatCreateMPIAdj() are freed here,
    so the caller must not free them (see the MatCreateMPIAdj() manpage).
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;
  /* Reference counted: only the final MatDestroy() actually frees storage */
  if (--mat->refct > 0) PetscFunctionReturn(0);

  /* Destroy optional attached mappings/maps before tearing down the data */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping); CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping); CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(USE_PETSC_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* a->diag is only allocated by MatMarkDiag_MPIAdj(), hence the guard */
  if (a->diag) PetscFree(a->diag);
  PetscFree(a->i);
  PetscFree(a->j);
  PetscFree(a->rowners);
  PetscFree(a);

  /* Free the Mat header itself last; a is dangling after this point */
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
94 
95 
96 #undef __FUNC__
97 #define __FUNC__ "MatSetOption_MPIAdj"
98 int MatSetOption_MPIAdj(Mat A,MatOption op)
99 {
100   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
101 
102   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
103     a->symmetric = PETSC_TRUE;
104   } else {
105     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
106   }
107   PetscFunctionReturn(0);
108 }
109 
110 
111 /*
112      Adds diagonal pointers to sparse matrix structure.
113 */
114 
115 #undef __FUNC__
116 #define __FUNC__ "MatMarkDiag_MPIAdj"
117 int MatMarkDiag_MPIAdj(Mat A)
118 {
119   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
120   int        i,j, *diag, m = a->m;
121 
122   diag = (int *) PetscMalloc( (m+1)*sizeof(int)); CHKPTRQ(diag);
123   PLogObjectMemory(A,(m+1)*sizeof(int));
124   for ( i=0; i<a->m; i++ ) {
125     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
126       if (a->j[j] == i) {
127         diag[i] = j;
128         break;
129       }
130     }
131   }
132   a->diag = diag;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNC__
137 #define __FUNC__ "MatGetSize_MPIAdj"
138 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
139 {
140   if (m) *m = A->M;
141   if (n) *n = A->N;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNC__
146 #define __FUNC__ "MatGetSize_MPIAdj"
147 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
148 {
149   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
150   if (m) *m = a->m;
151   if (n) *n = A->N;
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNC__
156 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
157 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
158 {
159   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
160   *m = a->rstart; *n = a->rend;
161   PetscFunctionReturn(0);
162 }
163 
164 #undef __FUNC__
165 #define __FUNC__ "MatGetRow_MPIAdj"
166 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
167 {
168   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
169   int        *itmp;
170 
171   row -= a->rstart;
172 
173   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
174 
175   *nz = a->i[row+1] - a->i[row];
176   if (v) *v = PETSC_NULL;
177   if (idx) {
178     itmp = a->j + a->i[row];
179     if (*nz) {
180       *idx = itmp;
181     }
182     else *idx = 0;
183   }
184   PetscFunctionReturn(0);
185 }
186 
187 #undef __FUNC__
188 #define __FUNC__ "MatRestoreRow_MPIAdj"
189 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
190 {
191   PetscFunctionReturn(0);
192 }
193 
194 #undef __FUNC__
195 #define __FUNC__ "MatGetBlockSize_MPIAdj"
196 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
197 {
198   *bs = 1;
199   PetscFunctionReturn(0);
200 }
201 
202 
203 #undef __FUNC__
204 #define __FUNC__ "MatEqual_MPIAdj"
205 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
206 {
207   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
208  int         flag = 1,ierr;
209 
210   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
211 
212   /* If the  matrix dimensions are not equal, or no of nonzeros */
213   if ((a->m != b->m ) ||( a->nz != b->nz)) {
214     flag = 0;
215   }
216 
217   /* if the a->i are the same */
218   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
219     flag = 0;
220   }
221 
222   /* if a->j are the same */
223   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
224     flag = 0;
225   }
226 
227   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
228 
229 
230   PetscFunctionReturn(0);
231 }
232 
233 
234 /* -------------------------------------------------------------------*/
235 static struct _MatOps MatOps_Values = {0,
236        MatGetRow_MPIAdj,
237        MatRestoreRow_MPIAdj,
238        0,
239        0,
240        0,
241        0,
242        0,
243        0,
244        0,
245        0,
246        0,
247        0,
248        0,
249        0,
250        0,
251        MatEqual_MPIAdj,
252        0,
253        0,
254        0,
255        0,
256        0,
257        0,
258        MatSetOption_MPIAdj,
259        0,
260        0,
261        0,
262        0,
263        0,
264        0,
265        MatGetSize_MPIAdj,
266        MatGetLocalSize_MPIAdj,
267        MatGetOwnershipRange_MPIAdj,
268        0,
269        0,
270        0,
271        0,
272        0,
273        0,
274        0,
275        0,
276        0,
277        0,
278        0,
279        0,
280        0,
281        0,
282        0,
283        0,
284        0,
285        0,
286        0,
287        MatGetBlockSize_MPIAdj,
288        0,
289        0,
290        0,
291        0,
292        0,
293        0,
294        0,
295        0,
296        0,
297        0,
298        0,
299        0,
300        MatGetMaps_Petsc};
301 
302 
303 #undef __FUNC__
304 #define __FUNC__ "MatCreateMPIAdj"
305 /*@C
306    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
307    The matrix does not have numerical values associated with it, but is
308    intended for ordering (to reduce bandwidth etc) and partitioning.
309 
310    Collective on MPI_Comm
311 
312    Input Parameters:
313 +  comm - MPI communicator, set to PETSC_COMM_SELF
314 .  m - number of local rows
315 .  n - number of columns
316 .  i - the indices into j for the start of each row
317 -  j - the column indices for each row (sorted for each row).
318        The indices in i and j start with zero (NOT with one).
319 
320    Output Parameter:
321 .  A - the matrix
322 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc will free them
   when the matrix is destroyed.
327 
328    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
329 
330 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetReordering()
331 @*/
332 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
333 {
334   Mat        B;
335   Mat_MPIAdj *b;
336   int        ii,ierr, flg,size,rank;
337 
338   MPI_Comm_size(comm,&size);
339   MPI_Comm_rank(comm,&rank);
340 
341   *A                  = 0;
342   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,comm,MatDestroy,MatView);
343   PLogObjectCreate(B);
344   B->data             = (void *) (b = PetscNew(Mat_MPIAdj)); CHKPTRQ(b);
345   PetscMemzero(b,sizeof(Mat_MPIAdj));
346   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
347   B->ops->destroy          = MatDestroy_MPIAdj;
348   B->ops->view             = MatView_MPIAdj;
349   B->factor           = 0;
350   B->lupivotthreshold = 1.0;
351   B->mapping          = 0;
352   B->assembled        = PETSC_FALSE;
353 
354   b->m = m; B->m = m;
355   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
356   B->n = n; B->N = n;
357 
358   /* the information in the maps duplicates the information computed below, eventually
359      we should remove the duplicate information that is not contained in the maps */
360   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
361   /* we don't know the "local columns" so just use the row information :-( */
362   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
363 
364   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int)); CHKPTRQ(b->rowners);
365   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
366   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
367   b->rowners[0] = 0;
368   for ( ii=2; ii<=size; ii++ ) {
369     b->rowners[ii] += b->rowners[ii-1];
370   }
371   b->rstart = b->rowners[rank];
372   b->rend   = b->rowners[rank+1];
373 
374   b->j  = j;
375   b->i  = i;
376 
377   b->nz               = i[m];
378   b->diag             = 0;
379   b->symmetric        = PETSC_FALSE;
380 
381   *A = B;
382 
383   ierr = OptionsHasName(PETSC_NULL,"-help", &flg); CHKERRQ(ierr);
384   if (flg) {ierr = MatPrintHelp(B); CHKERRQ(ierr); }
385   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
386   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
387   PetscFunctionReturn(0);
388 }
389 
390 
391 
392