xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision be0abb6ddea392ceaee217d3645fed7c6ea71822)
1 #ifdef PETSC_RCS_HEADER
2 static char vcid[] = "$Id: mpiadj.c,v 1.25 1999/06/30 23:52:07 balay Exp bsmith $";
3 #endif
4 
5 /*
6     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
7 */
8 #include "sys.h"
9 #include "src/mat/impls/adj/mpi/mpiadj.h"
10 
11 #undef __FUNC__
12 #define __FUNC__ "MatView_MPIAdj_ASCII"
13 extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
14 {
15   Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
16   int         ierr, i,j, m = a->m,  format;
17   FILE        *fd;
18   char        *outputname;
19   MPI_Comm    comm = A->comm;
20 
21   ierr = ViewerASCIIGetPointer(viewer,&fd);CHKERRQ(ierr);
22   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
23   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
24   if (format == VIEWER_FORMAT_ASCII_INFO) {
25     PetscFunctionReturn(0);
26   } else {
27     for ( i=0; i<m; i++ ) {
28       ierr = PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
30         ierr = PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscSynchronizedFPrintf(comm,fd,"\n");CHKERRQ(ierr);
33     }
34   }
35   ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ "MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   ViewerType  vtype;
44   int         ierr;
45 
46   ierr = ViewerGetType(viewer,&vtype);CHKERRQ(ierr);
47   if (PetscTypeCompare(vtype,ASCII_VIEWER)){
48     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
49   } else {
50     SETERRQ(1,1,"Viewer type not supported by PETSc object");
51   }
52   PetscFunctionReturn(0);
53 }
54 
#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - Frees all storage associated with an MPIAdj matrix:
    the optional mappings/maps on the Mat header, then the adjacency arrays
    owned by the Mat_MPIAdj data.  Note that the i and j arrays were handed
    over by the user in MatCreateMPIAdj() and are freed here.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;

  /* mappings and maps are optional; destroy only those that were created */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

  /* log state while a->nz is still accessible, before freeing a */
#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
90 
91 
92 #undef __FUNC__
93 #define __FUNC__ "MatSetOption_MPIAdj"
94 int MatSetOption_MPIAdj(Mat A,MatOption op)
95 {
96   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
97 
98   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
99     a->symmetric = PETSC_TRUE;
100   } else {
101     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
102   }
103   PetscFunctionReturn(0);
104 }
105 
106 
107 /*
108      Adds diagonal pointers to sparse matrix structure.
109 */
110 
111 #undef __FUNC__
112 #define __FUNC__ "MatMarkDiag_MPIAdj"
113 int MatMarkDiag_MPIAdj(Mat A)
114 {
115   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
116   int        i,j, *diag, m = a->m;
117 
118   diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
119   PLogObjectMemory(A,(m+1)*sizeof(int));
120   for ( i=0; i<a->m; i++ ) {
121     for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
122       if (a->j[j] == i) {
123         diag[i] = j;
124         break;
125       }
126     }
127   }
128   a->diag = diag;
129   PetscFunctionReturn(0);
130 }
131 
132 #undef __FUNC__
133 #define __FUNC__ "MatGetSize_MPIAdj"
134 int MatGetSize_MPIAdj(Mat A,int *m,int *n)
135 {
136   if (m) *m = A->M;
137   if (n) *n = A->N;
138   PetscFunctionReturn(0);
139 }
140 
141 #undef __FUNC__
142 #define __FUNC__ "MatGetSize_MPIAdj"
143 int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
144 {
145   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
146   if (m) *m = a->m;
147   if (n) *n = A->N;
148   PetscFunctionReturn(0);
149 }
150 
151 #undef __FUNC__
152 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
153 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
154 {
155   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
156   *m = a->rstart; *n = a->rend;
157   PetscFunctionReturn(0);
158 }
159 
160 #undef __FUNC__
161 #define __FUNC__ "MatGetRow_MPIAdj"
162 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
163 {
164   Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
165   int        *itmp;
166 
167   row -= a->rstart;
168 
169   if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");
170 
171   *nz = a->i[row+1] - a->i[row];
172   if (v) *v = PETSC_NULL;
173   if (idx) {
174     itmp = a->j + a->i[row];
175     if (*nz) {
176       *idx = itmp;
177     }
178     else *idx = 0;
179   }
180   PetscFunctionReturn(0);
181 }
182 
183 #undef __FUNC__
184 #define __FUNC__ "MatRestoreRow_MPIAdj"
185 int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
186 {
187   PetscFunctionReturn(0);
188 }
189 
190 #undef __FUNC__
191 #define __FUNC__ "MatGetBlockSize_MPIAdj"
192 int MatGetBlockSize_MPIAdj(Mat A, int *bs)
193 {
194   *bs = 1;
195   PetscFunctionReturn(0);
196 }
197 
198 
199 #undef __FUNC__
200 #define __FUNC__ "MatEqual_MPIAdj"
201 int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
202 {
203   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
204  int         flag = 1,ierr;
205 
206   if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
207 
208   /* If the  matrix dimensions are not equal, or no of nonzeros */
209   if ((a->m != b->m ) ||( a->nz != b->nz)) {
210     flag = 0;
211   }
212 
213   /* if the a->i are the same */
214   if (PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int))) {
215     flag = 0;
216   }
217 
218   /* if a->j are the same */
219   if (PetscMemcmp(a->j, b->j, (a->nz)*sizeof(int))) {
220     flag = 0;
221   }
222 
223   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
224 
225 
226   PetscFunctionReturn(0);
227 }
228 
229 
/* -------------------------------------------------------------------*/
/*
    Function table for the MPIAdj matrix type.  The entries are positional
    (slot order is fixed by struct _MatOps); a 0 means the operation is not
    supported by this type.  Only the handful of query/compare operations
    implemented above are filled in.  NOTE(review): slot positions assumed
    to match the _MatOps layout of this PETSc version -- do not reorder.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};
297 
298 
299 #undef __FUNC__
300 #define __FUNC__ "MatCreateMPIAdj"
301 /*@C
302    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
303    The matrix does not have numerical values associated with it, but is
304    intended for ordering (to reduce bandwidth etc) and partitioning.
305 
306    Collective on MPI_Comm
307 
308    Input Parameters:
309 +  comm - MPI communicator, set to PETSC_COMM_SELF
310 .  m - number of local rows
311 .  n - number of columns
312 .  i - the indices into j for the start of each row
313 -  j - the column indices for each row (sorted for each row).
314        The indices in i and j start with zero (NOT with one).
315 
316    Output Parameter:
317 .  A - the matrix
318 
319    Level: intermediate
320 
321    Notes: This matrix object does not support most matrix operations, include
322    MatSetValues().
323    You must NOT free the ii and jj arrays yourself. PETSc will free them
324    when the matrix is destroyed.
325 
326    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
327 
328 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
329 @*/
330 int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
331 {
332   Mat        B;
333   Mat_MPIAdj *b;
334   int        ii,ierr, flg,size,rank;
335 
336   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
337   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
338 
339   *A                  = 0;
340   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
341   PLogObjectCreate(B);
342   B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
343   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
344   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
345   B->ops->destroy     = MatDestroy_MPIAdj;
346   B->ops->view        = MatView_MPIAdj;
347   B->factor           = 0;
348   B->lupivotthreshold = 1.0;
349   B->mapping          = 0;
350   B->assembled        = PETSC_FALSE;
351 
352   b->m = m; B->m = m;
353   ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
354   B->n = n; B->N = n;
355 
356   /* the information in the maps duplicates the information computed below, eventually
357      we should remove the duplicate information that is not contained in the maps */
358   ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
359   /* we don't know the "local columns" so just use the row information :-( */
360   ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);
361 
362   b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
363   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
364   ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
365   b->rowners[0] = 0;
366   for ( ii=2; ii<=size; ii++ ) {
367     b->rowners[ii] += b->rowners[ii-1];
368   }
369   b->rstart = b->rowners[rank];
370   b->rend   = b->rowners[rank+1];
371 
372   b->j  = j;
373   b->i  = i;
374 
375   b->nz               = i[m];
376   b->diag             = 0;
377   b->symmetric        = PETSC_FALSE;
378 
379   *A = B;
380 
381   ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
382   if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
383   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
385   PetscFunctionReturn(0);
386 }
387 
388 
389 
390