xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision b371d7f8e2abbaebdcb15be13d1cdb20ba919fe0)
1 /*$Id: mpiadj.c,v 1.52 2000/11/06 16:42:54 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
/*
   MatView_MPIAdj_ASCII - Prints an MPIAdj matrix in ASCII form, one line per
   local row: "row <global row>: <column indices>".  The info format prints
   nothing; the Matlab format is not supported.
*/
int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
{
  Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
  int         ierr,i,j,m = A->m, format;
  char        *outputname;

  PetscFunctionBegin;
  /* NOTE(review): outputname is fetched here but never used below -- confirm
     whether the ViewerGetOutputname() call can be removed */
  ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
  ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == VIEWER_FORMAT_ASCII_INFO) {
    /* nothing to print for the info format */
    PetscFunctionReturn(0);
  } else if (format == VIEWER_FORMAT_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    /* disable tabs so the synchronized per-processor output lines up */
    ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      /* a->rstart converts the local row number to the global row number */
      ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = ViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
39 #undef __FUNC__
40 #define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,Viewer viewer)
42 {
43   int        ierr;
44   PetscTruth isascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
48   if (isascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNC__
57 #define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
58 int MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   int        ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNC__
79 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
80 int MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
100 int MatMarkDiagonal_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
103   int        i,j,*diag,m = A->m;
104 
105   PetscFunctionBegin;
106   diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
107   PLogObjectMemory(A,(m+1)*sizeof(int));
108   for (i=0; i<A->m; i++) {
109     for (j=a->i[i]; j<a->i[i+1]; j++) {
110       if (a->j[j] == i) {
111         diag[i] = j;
112         break;
113       }
114     }
115   }
116   a->diag = diag;
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNC__
121 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
122 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
123 {
124   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
125   PetscFunctionBegin;
126   if (m) *m = a->rstart;
127   if (n) *n = a->rend;
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNC__
132 #define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
133 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
134 {
135   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
136   int        *itmp;
137 
138   PetscFunctionBegin;
139   row -= a->rstart;
140 
141   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
142 
143   *nz = a->i[row+1] - a->i[row];
144   if (v) *v = PETSC_NULL;
145   if (idx) {
146     itmp = a->j + a->i[row];
147     if (*nz) {
148       *idx = itmp;
149     }
150     else *idx = 0;
151   }
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNC__
156 #define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj(); intentionally a
   no-op because MatGetRow_MPIAdj() returns pointers into the matrix's own
   storage and allocates nothing.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
162 
163 #undef __FUNC__
164 #define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices are stored point-wise, so the
   block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
171 
172 
173 #undef __FUNC__
174 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
175 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
176 {
177   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
178   int         ierr;
179   PetscTruth  flag;
180 
181   PetscFunctionBegin;
182   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
183   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
184 
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNC__
201 #define __FUNC__ /*<a name="MatGetRowIJ_MPIAdj"></a>*/"MatGetRowIJ_MPIAdj"
202 int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
203 {
204   int        ierr,size,i;
205   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
206 
207   PetscFunctionBegin;
208   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
209   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
210   *m    = A->m;
211   *ia   = a->i;
212   *ja   = a->j;
213   *done = PETSC_TRUE;
214   if (oshift) {
215     for (i=0; i<(*ia)[*m]; i++) {
216       (*ja)[i]++;
217     }
218     for (i=0; i<=(*m); i++) (*ia)[i]++;
219   }
220   PetscFunctionReturn(0);
221 }
222 
223 #undef __FUNC__
224 #define __FUNC__ /*<a name="MatRestoreRowIJ_MPIAdj"></a>*/"MatRestoreRowIJ_MPIAdj"
/*
   MatRestoreRowIJ_MPIAdj - Undoes the optional one-based shift applied by
   MatGetRowIJ_MPIAdj(), restoring the matrix's own ia/ja arrays to
   zero-based indexing.

   NOTE(review): the sanity checks compare the caller's pointers against
   a->ia and a->ja, but MatGetRowIJ_MPIAdj() above never stores the handed-out
   pointers into those fields -- confirm they are set elsewhere, otherwise
   these checks may fire spuriously.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (a->ia != ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (a->ja != ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* reverse order of MatGetRowIJ_MPIAdj(): shift ia back first so that
       (*ia)[*m] is once again the correct (zero-based) bound for the ja loop */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
241 
242 /* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  Entries are POSITIONAL
   (indexed by the MatOperation enum in the Mat header); a 0 marks an
   operation that this matrix type does not support.  Do not reorder.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
309 
310 
311 EXTERN_C_BEGIN
312 #undef __FUNC__
313 #define __FUNC__ /*<a name=""></a>*/"MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor for MATMPIADJ.  Installs the function
   table, computes the global row count, and builds the rowners[] ownership
   table; the nonzero structure itself is supplied later by
   MatMPIAdjSetPreallocation().
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count = sum of the local row counts across the communicator */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  /* columns are not partitioned for this type: local width = global width */
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p] = first global row owned by processor p, computed as a prefix
     sum of the gathered local row counts; rowners[size] = total rows.
     (The prefix-sum loop correctly starts at ii=2: rowners[1] += rowners[0]
     would be a no-op since rowners[0] == 0.) */
  b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
352 EXTERN_C_END
353 
354 #undef __FUNC__
355 #define __FUNC__ /*<a name=""></a>*/"MatMPIAdjSetPreallocation"
356 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
357 {
358   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
359   int        ierr;
360 #if defined(PETSC_USE_BOPT_g)
361   int        ii;
362 #endif
363 
364   PetscFunctionBegin;
365   B->preallocated = PETSC_TRUE;
366 #if defined(PETSC_USE_BOPT_g)
367   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
368   for (ii=1; ii<B->m; ii++) {
369     if (i[ii] < 0 || i[ii] < i[ii-1]) {
370       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
371     }
372   }
373   for (ii=0; ii<i[B->m]; ii++) {
374     if (j[ii] < 0 || j[ii] >= B->N) {
375       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
376     }
377   }
378 #endif
379 
380   b->j      = j;
381   b->i      = i;
382   b->values = values;
383 
384   b->nz               = i[B->m];
385   b->diag             = 0;
386   b->symmetric        = PETSC_FALSE;
387   b->freeaij          = PETSC_TRUE;
388 
389   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
390   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
391   PetscFunctionReturn(0);
392 }
393 
394 #undef __FUNC__
395 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
396 /*@C
397    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
398    The matrix does not have numerical values associated with it, but is
399    intended for ordering (to reduce bandwidth etc) and partitioning.
400 
401    Collective on MPI_Comm
402 
403    Input Parameters:
404 +  comm - MPI communicator
405 .  m - number of local rows
406 .  n - number of columns
407 .  i - the indices into j for the start of each row
408 .  j - the column indices for each row (sorted for each row).
409        The indices in i and j start with zero (NOT with one).
410 -  values -[optional] edge weights
411 
412    Output Parameter:
413 .  A - the matrix
414 
415    Level: intermediate
416 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself; PETSc will free them
   when the matrix is destroyed, so you must allocate them with PetscMalloc(). If you
   call from Fortran, you need not create the arrays with PetscMalloc().
422    Should not include the matrix diagonals.
423 
424    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
425 
426 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
427 @*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* local rows = m, global rows = PETSC_DETERMINE (summed across the
     communicator in MatCreate_MPIAdj); both local and global column counts
     are n, since columns are not partitioned for this type */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* ownership of i, j and values passes to the matrix here */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
438 
439 EXTERN_C_BEGIN
440 #undef __FUNC__
441 #define __FUNC__ /*<a name=""></a>*/"MatConvertTo_MPIAdj"
442 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
443 {
444   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
445   Scalar   *ra;
446   MPI_Comm comm;
447 
448   PetscFunctionBegin;
449   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
450   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
451   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
452 
453   /* count the number of nonzeros per row */
454   for (i=0; i<m; i++) {
455     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
456     for (j=0; j<len; j++) {
457       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
458     }
459     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
460     nzeros += len;
461   }
462 
463   /* malloc space for nonzeros */
464   a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
465   ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
466   ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);
467 
468   nzeros = 0;
469   ia[0]  = 0;
470   for (i=0; i<m; i++) {
471     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
472     cnt     = 0;
473     for (j=0; j<len; j++) {
474       if (rj[j] != i+rstart) { /* if not diagonal */
475         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
476         ja[nzeros+cnt++] = rj[j];
477       }
478     }
479     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
480     nzeros += cnt;
481     ia[i+1] = nzeros;
482   }
483 
484   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
485   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
486 
487   PetscFunctionReturn(0);
488 }
489 EXTERN_C_END
490 
491 
492 
493 
494 
495