xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision ffac6cdb671c711dabb6f689a6a2ffdf24fad51a)
1 /*$Id: mpiadj.c,v 1.49 2000/10/24 20:25:58 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = A->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == VIEWER_FORMAT_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   int        ierr;
44   PetscTruth isascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
48   if (isascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNC__
57 #define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
58 int MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   int        ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNC__
79 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
80 int MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
100 int MatMarkDiagonal_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
103   int        i,j,*diag,m = A->m;
104 
105   PetscFunctionBegin;
106   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
107   PLogObjectMemory(A,(m+1)*sizeof(int));
108   for (i=0; i<A->m; i++) {
109     for (j=a->i[i]; j<a->i[i+1]; j++) {
110       if (a->j[j] == i) {
111         diag[i] = j;
112         break;
113       }
114     }
115   }
116   a->diag = diag;
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNC__
121 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
122 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
123 {
124   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
125   PetscFunctionBegin;
126   if (m) *m = a->rstart;
127   if (n) *n = a->rend;
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNC__
132 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
133 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
134 {
135   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
136   int        *itmp;
137 
138   PetscFunctionBegin;
139   row -= a->rstart;
140 
141   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
142 
143   *nz = a->i[row+1] - a->i[row];
144   if (v) *v = PETSC_NULL;
145   if (idx) {
146     itmp = a->j + a->i[row];
147     if (*nz) {
148       *idx = itmp;
149     }
150     else *idx = 0;
151   }
152   PetscFunctionReturn(0);
153 }
154 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - No-op: MatGetRow_MPIAdj returns pointers
    directly into the stored structure, so there is nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
162 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - Adjacency matrices are always stored with
    block size 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
171 
172 
173 #undef __FUNC__
174 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
175 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
176 {
177   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
178   int         ierr;
179   PetscTruth  flag;
180 
181   PetscFunctionBegin;
182   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
183   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
184 
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 

/* -------------------------------------------------------------------*/
/*
   Function table for the MATMPIADJ type.  Slots left 0 are operations
   this type does not support (e.g. MatSetValues(), MatMult()); only the
   structural-query, comparison, and lifecycle operations are provided.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
268 

EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreate_MPIAdj"
/*
    MatCreate_MPIAdj - Type constructor registered for MATMPIADJ.
    Installs the function table, determines the global row count and the
    per-process row-ownership table, and creates the row/column maps.

    Collective on B->comm (calls MPI_Allreduce and MPI_Allgather).
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  /* columns are not distributed: every process sees all N of them */
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* gather every process's local row count, then prefix-sum so that
     rowners[p] is the first global row owned by process p */
  b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END
312 
313 #undef __FUNC__
314 #define __FUNC__ /*<a name=""></a>*/"MatMPIAdjSetPreallocation"
315 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
316 {
317   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
318   int        ierr;
319 #if defined(PETSC_USE_BOPT_g)
320   int        ii;
321 #endif
322 
323   PetscFunctionBegin;
324   B->preallocated = PETSC_TRUE;
325 #if defined(PETSC_USE_BOPT_g)
326   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
327   for (ii=1; ii<B->m; ii++) {
328     if (i[ii] < 0 || i[ii] < i[ii-1]) {
329       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
330     }
331   }
332   for (ii=0; ii<i[B->m]; ii++) {
333     if (j[ii] < 0 || j[ii] >= B->N) {
334       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
335     }
336   }
337 #endif
338 
339   b->j      = j;
340   b->i      = i;
341   b->values = values;
342 
343   b->nz               = i[B->m];
344   b->diag             = 0;
345   b->symmetric        = PETSC_FALSE;
346   b->freeaij          = PETSC_TRUE;
347 
348   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
349   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
350   PetscFunctionReturn(0);
351 }
352 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed. And you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
397 
398 EXTERN_C_BEGIN
399 #undef __FUNC__
400 #define __FUNC__ /*<a name=""></a>*/"MatConvertTo_MPIAdj"
401 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
402 {
403   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
404   Scalar   *ra;
405   MPI_Comm comm;
406 
407   PetscFunctionBegin;
408   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
409   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
410   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
411 
412   /* count the number of nonzeros per row */
413   for (i=0; i<m; i++) {
414     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
415     for (j=0; j<len; j++) {
416       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
417     }
418     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
419     nzeros += len;
420   }
421 
422   /* malloc space for nonzeros */
423   a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
424   ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
425   ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);
426 
427   nzeros = 0;
428   ia[0]  = 0;
429   for (i=0; i<m; i++) {
430     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
431     cnt     = 0;
432     for (j=0; j<len; j++) {
433       if (rj[j] != i+rstart) { /* if not diagonal */
434         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
435         ja[nzeros+cnt++] = rj[j];
436       }
437     }
438     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
439     nzeros += cnt;
440     ia[i+1] = nzeros;
441   }
442 
443   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
444   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
445 
446   PetscFunctionReturn(0);
447 }
448 EXTERN_C_END
449 
450 
451 
452 
453 
454