xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision d42735a103fa0eebdc40ab0086e89c7b7740dbb2)
1 /*$Id: mpiadj.c,v 1.51 2000/11/06 16:27:58 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = A->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == VIEWER_FORMAT_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = ViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   int        ierr;
44   PetscTruth isascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
48   if (isascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNC__
57 #define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
58 int MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   int        ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNC__
79 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
80 int MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
100 int MatMarkDiagonal_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
103   int        i,j,*diag,m = A->m;
104 
105   PetscFunctionBegin;
106   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
107   PLogObjectMemory(A,(m+1)*sizeof(int));
108   for (i=0; i<A->m; i++) {
109     for (j=a->i[i]; j<a->i[i+1]; j++) {
110       if (a->j[j] == i) {
111         diag[i] = j;
112         break;
113       }
114     }
115   }
116   a->diag = diag;
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNC__
121 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
122 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
123 {
124   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
125   PetscFunctionBegin;
126   if (m) *m = a->rstart;
127   if (n) *n = a->rend;
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNC__
132 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
133 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
134 {
135   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
136   int        *itmp;
137 
138   PetscFunctionBegin;
139   row -= a->rstart;
140 
141   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
142 
143   *nz = a->i[row+1] - a->i[row];
144   if (v) *v = PETSC_NULL;
145   if (idx) {
146     itmp = a->j + a->i[row];
147     if (*nz) {
148       *idx = itmp;
149     }
150     else *idx = 0;
151   }
152   PetscFunctionReturn(0);
153 }
154 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj().  That routine
    hands out pointers directly into the matrix storage (no copies), so there
    is nothing to release; this exists only to satisfy the required
    MatGetRow()/MatRestoreRow() pairing of the Mat interface.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
162 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - Adjacency matrices have no block structure,
    so the block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
171 
172 
173 #undef __FUNC__
174 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
175 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
176 {
177   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
178   int         ierr;
179   PetscTruth  flag;
180 
181   PetscFunctionBegin;
182   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
183   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
184 
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNC__
201 #define __FUNC__ /*<a name="MatGetRowIJ_MPIAdj"></a>*/"MatGetRowIJ_MPIAdj"
202 int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
203 {
204   int        ierr,size,i;
205   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
206 
207   PetscFunctionBegin;
208   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
209   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
210   *m    = A->m;
211   *ia   = a->i;
212   *ja   = a->j;
213   *done = PETSC_TRUE;
214   if (oshift) {
215     for (i=0; i<(*ia)[*m]; i++) {
216       (*ja)[i]++;
217     }
218     for (i=0; i<=(*m); i++) (*ia)[i]++;
219   }
220   PetscFunctionReturn(0);
221 }
222 
223 #undef __FUNC__
224 #define __FUNC__ /*<a name="MatRestoreRowIJ_MPIAdj"></a>*/"MatRestoreRowIJ_MPIAdj"
225 int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
226 {
227   int        i;
228   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
229 
230   PetscFunctionBegin;
231   if (oshift) {
232     for (i=0; i<=(*m); i++) (*ia)[i]--;
233     for (i=0; i<(*ia)[*m]; i++) {
234       (*ja)[i]--;
235     }
236   }
237   PetscFunctionReturn(0);
238 }
239 
/* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  A 0 entry means the
   corresponding Mat operation is not supported by this format (MPIAdj
   stores only the adjacency structure, so most numeric operations do not
   apply).  The slot order is fixed by struct _MatOps in the PETSc Mat
   headers -- do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
307 
308 
309 EXTERN_C_BEGIN
310 #undef __FUNC__
311 #define __FUNC__ /*<a name=""></a>*/"MatCreate_MPIAdj"
312 int MatCreate_MPIAdj(Mat B)
313 {
314   Mat_MPIAdj *b;
315   int        ii,ierr,size,rank;
316 
317   PetscFunctionBegin;
318   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
319   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
320 
321   B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
322   ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
323   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
324   B->factor           = 0;
325   B->lupivotthreshold = 1.0;
326   B->mapping          = 0;
327   B->assembled        = PETSC_FALSE;
328 
329   ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
330   B->n = B->N;
331 
332   /* the information in the maps duplicates the information computed below, eventually
333      we should remove the duplicate information that is not contained in the maps */
334   ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
335   /* we don't know the "local columns" so just use the row information :-(*/
336   ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);
337 
338   b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
339   PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
340   ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
341   b->rowners[0] = 0;
342   for (ii=2; ii<=size; ii++) {
343     b->rowners[ii] += b->rowners[ii-1];
344   }
345   b->rstart = b->rowners[rank];
346   b->rend   = b->rowners[rank+1];
347 
348   PetscFunctionReturn(0);
349 }
350 EXTERN_C_END
351 
352 #undef __FUNC__
353 #define __FUNC__ /*<a name=""></a>*/"MatMPIAdjSetPreallocation"
354 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
355 {
356   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
357   int        ierr;
358 #if defined(PETSC_USE_BOPT_g)
359   int        ii;
360 #endif
361 
362   PetscFunctionBegin;
363   B->preallocated = PETSC_TRUE;
364 #if defined(PETSC_USE_BOPT_g)
365   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
366   for (ii=1; ii<B->m; ii++) {
367     if (i[ii] < 0 || i[ii] < i[ii-1]) {
368       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
369     }
370   }
371   for (ii=0; ii<i[B->m]; ii++) {
372     if (j[ii] < 0 || j[ii] >= B->N) {
373       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
374     }
375   }
376 #endif
377 
378   b->j      = j;
379   b->i      = i;
380   b->values = values;
381 
382   b->nz               = i[B->m];
383   b->diag             = 0;
384   b->symmetric        = PETSC_FALSE;
385   b->freeaij          = PETSC_TRUE;
386 
387   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
388   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
389   PetscFunctionReturn(0);
390 }
391 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* global rows = PETSC_DETERMINE (summed over comm by MatCreate_MPIAdj);
     columns are treated as global only, so n is passed for both */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* hands ownership of i, j and values to the matrix */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
436 
EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatConvertTo_MPIAdj"
/*
    MatConvertTo_MPIAdj - Builds an MPIAdj matrix from the sparsity pattern
    of any matrix A that supports MatGetRow().

    Diagonal entries are dropped (MPIAdj stores off-diagonal adjacency only);
    the integer edge weights are the absolute values of A's entries truncated
    to int.  The new arrays are handed to MatCreateMPIAdj(), which takes
    ownership and frees them with the new matrix.
*/
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* first pass: count the number of off-diagonal nonzeros per local row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    /* NOTE(review): len was decremented above, so MatRestoreRow() sees a
       smaller count than MatGetRow() returned -- harmless for the usual
       no-op restore, but confirm for exotic Mat types */
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros; the +1 avoids zero-size allocations */
  a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
  /* NOTE(review): N+1 entries allocated but only m+1 filled below --
     presumably oversized intentionally; confirm before shrinking */
  ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
  ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);

  /* second pass: fill the row offsets, column indices and integer weights */
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
488 
489 
490 
491 
492 
493