xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision b0a32e0c6855ee6a6cd3495fa7da12ea9885bc5d)
1 /*$Id: mpiadj.c,v 1.54 2000/11/08 15:15:20 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
14   int         ierr,i,j,m = A->m, format;
15   char        *outputname;
16 
17   PetscFunctionBegin;
18   ierr = PetscViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_FORMAT_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_FORMAT_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNC__
40 #define __FUNC__ "MatView_MPIAdj"
41 int MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   int        ierr;
44   PetscTruth isascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
48   if (isascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNC__
57 #define __FUNC__ "MatDestroy_MPIAdj"
58 int MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
61   int        ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNC__
79 #define __FUNC__ "MatSetOption_MPIAdj"
80 int MatSetOption_MPIAdj(Mat A,MatOption op)
81 {
82   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
83 
84   PetscFunctionBegin;
85   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
86     a->symmetric = PETSC_TRUE;
87   } else {
88     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 
94 /*
95      Adds diagonal pointers to sparse matrix structure.
96 */
97 
98 #undef __FUNC__
99 #define __FUNC__ "MatMarkDiagonal_MPIAdj"
100 int MatMarkDiagonal_MPIAdj(Mat A)
101 {
102   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
103   int        i,j,*diag,m = A->m;
104 
105   PetscFunctionBegin;
106 ierr = PetscMalloc((m+1)*sizeof(int),&  diag );CHKERRQ(ierr);
107   PetscLogObjectMemory(A,(m+1)*sizeof(int));
108   for (i=0; i<A->m; i++) {
109     for (j=a->i[i]; j<a->i[i+1]; j++) {
110       if (a->j[j] == i) {
111         diag[i] = j;
112         break;
113       }
114     }
115   }
116   a->diag = diag;
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNC__
121 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
122 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
123 {
124   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
125   PetscFunctionBegin;
126   if (m) *m = a->rstart;
127   if (n) *n = a->rend;
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNC__
132 #define __FUNC__ "MatGetRow_MPIAdj"
133 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
134 {
135   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
136   int        *itmp;
137 
138   PetscFunctionBegin;
139   row -= a->rstart;
140 
141   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
142 
143   *nz = a->i[row+1] - a->i[row];
144   if (v) *v = PETSC_NULL;
145   if (idx) {
146     itmp = a->j + a->i[row];
147     if (*nz) {
148       *idx = itmp;
149     }
150     else *idx = 0;
151   }
152   PetscFunctionReturn(0);
153 }
154 
155 #undef __FUNC__
156 #define __FUNC__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj(). Intentionally a
    no-op: MatGetRow_MPIAdj() hands back pointers into the matrix's own
    storage, so there is no temporary memory to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
162 
163 #undef __FUNC__
164 #define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - Adjacency matrices are always unblocked, so the
    block size is 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
171 
172 
173 #undef __FUNC__
174 #define __FUNC__ "MatEqual_MPIAdj"
175 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
176 {
177   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
178   int         ierr;
179   PetscTruth  flag;
180 
181   PetscFunctionBegin;
182   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
183   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
184 
185   /* If the  matrix dimensions are not equal,or no of nonzeros */
186   if ((A->m != B->m) ||(a->nz != b->nz)) {
187     flag = PETSC_FALSE;
188   }
189 
190   /* if the a->i are the same */
191   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
192 
193   /* if a->j are the same */
194   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
195 
196   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNC__
201 #define __FUNC__ "MatGetRowIJ_MPIAdj"
/*
    MatGetRowIJ_MPIAdj - Exposes the internal compressed-row arrays (i,j).
    Only works on one process; otherwise *done is set to PETSC_FALSE and no
    arrays are returned. When oshift is nonzero the INTERNAL arrays are
    converted in place from 0-based to 1-based indexing; the caller must undo
    this via MatRestoreRowIJ_MPIAdj() before using the matrix again.
*/
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift ja first: its length (*ia)[*m] must be read while ia is still 0-based */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
222 
223 #undef __FUNC__
224 #define __FUNC__ "MatRestoreRowIJ_MPIAdj"
/*
    MatRestoreRowIJ_MPIAdj - Undoes MatGetRowIJ_MPIAdj(): verifies the caller
    passed back the same internal arrays and, if oshift was used, converts
    them in place from 1-based back to 0-based indexing.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* un-shift ia first so (*ia)[*m] is once again the 0-based length of ja */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
241 
242 /* -------------------------------------------------------------------*/
/*
    Function table for the MATMPIADJ type. The table is POSITIONAL: each slot
    corresponds to a fixed Mat operation, and a 0 marks an operation this
    type does not support (most of them — MPIAdj is only meant for
    ordering/partitioning, not numerical work). Do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
309 
310 
311 EXTERN_C_BEGIN
312 #undef __FUNC__
313 #define __FUNC__ "MatCreate_MPIAdj"
/*
    MatCreate_MPIAdj - Type constructor for MATMPIADJ. Installs the function
    table, determines the global row count from the local counts, builds the
    row/column maps, and gathers every process's row count into b->rowners so
    the [rstart,rend) ownership range of each process can be computed.
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;  /* columns are not distributed: every process sees all N columns */

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p] = first global row owned by process p (prefix sum of local sizes) */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  /* NOTE(review): logs (size+2) ints although only (size+1) were allocated — confirm intended */
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  /* gather each process's local row count into slots 1..size, then prefix-sum */
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
353 EXTERN_C_END
354 
355 #undef __FUNC__
356 #define __FUNC__ "MatMPIAdjSetPreallocation"
357 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
358 {
359   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
360   int        ierr;
361 #if defined(PETSC_USE_BOPT_g)
362   int        ii;
363 #endif
364 
365   PetscFunctionBegin;
366   B->preallocated = PETSC_TRUE;
367 #if defined(PETSC_USE_BOPT_g)
368   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
369   for (ii=1; ii<B->m; ii++) {
370     if (i[ii] < 0 || i[ii] < i[ii-1]) {
371       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
372     }
373   }
374   for (ii=0; ii<i[B->m]; ii++) {
375     if (j[ii] < 0 || j[ii] >= B->N) {
376       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
377     }
378   }
379 #endif
380 
381   b->j      = j;
382   b->i      = i;
383   b->values = values;
384 
385   b->nz               = i[B->m];
386   b->diag             = 0;
387   b->symmetric        = PETSC_FALSE;
388   b->freeaij          = PETSC_TRUE;
389 
390   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
391   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
392   PetscFunctionReturn(0);
393 }
394 
395 #undef __FUNC__
396 #define __FUNC__ "MatCreateMPIAdj"
397 /*@C
398    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
399    The matrix does not have numerical values associated with it, but is
400    intended for ordering (to reduce bandwidth etc) and partitioning.
401 
402    Collective on MPI_Comm
403 
404    Input Parameters:
405 +  comm - MPI communicator
406 .  m - number of local rows
407 .  n - number of columns
408 .  i - the indices into j for the start of each row
409 .  j - the column indices for each row (sorted for each row).
410        The indices in i and j start with zero (NOT with one).
411 -  values -[optional] edge weights
412 
413    Output Parameter:
414 .  A - the matrix
415 
416    Level: intermediate
417 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself; PETSc will free them
   when the matrix is destroyed, so they must be allocated with PetscMalloc().
   (If called from Fortran you need not create the arrays with PetscMalloc().)
423    Should not include the matrix diagonals.
424 
425    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
426 
427 .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
428 @*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* global rows determined by summing the local m's; all n columns are global */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  /* hands ownership of i, j, values to the matrix and assembles it */
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
439 
440 EXTERN_C_BEGIN
441 #undef __FUNC__
442 #define __FUNC__ "MatConvertTo_MPIAdj"
443 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
444 {
445   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
446   Scalar   *ra;
447   MPI_Comm comm;
448 
449   PetscFunctionBegin;
450   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
451   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
452   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
453 
454   /* count the number of nonzeros per row */
455   for (i=0; i<m; i++) {
456     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
457     for (j=0; j<len; j++) {
458       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
459     }
460     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
461     nzeros += len;
462   }
463 
464   /* malloc space for nonzeros */
465 ierr = PetscMalloc((nzeros+1)*sizeof(int),&  a  );CHKERRQ(ierr);
466 ierr = PetscMalloc((N+1)*sizeof(int),&  ia );CHKERRQ(ierr);
467 ierr = PetscMalloc((nzeros+1)*sizeof(int),&  ja );CHKERRQ(ierr);
468 
469   nzeros = 0;
470   ia[0]  = 0;
471   for (i=0; i<m; i++) {
472     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
473     cnt     = 0;
474     for (j=0; j<len; j++) {
475       if (rj[j] != i+rstart) { /* if not diagonal */
476         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
477         ja[nzeros+cnt++] = rj[j];
478       }
479     }
480     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
481     nzeros += cnt;
482     ia[i+1] = nzeros;
483   }
484 
485   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
486   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
487 
488   PetscFunctionReturn(0);
489 }
490 EXTERN_C_END
491 
492 
493 
494 
495 
496