xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision f6cc79bd0334691d264e95eacfaee85c3da0d04a)
1 /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNCT__
58 #define __FUNCT__ "MatDestroy_MPIAdj"
59 int MatDestroy_MPIAdj(Mat mat)
60 {
61   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
62   int        ierr;
63 
64   PetscFunctionBegin;
65 #if defined(PETSC_USE_LOG)
66   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
67 #endif
68   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
69   if (a->freeaij) {
70     ierr = PetscFree(a->i);CHKERRQ(ierr);
71     ierr = PetscFree(a->j);CHKERRQ(ierr);
72     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
73   }
74   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
75   ierr = PetscFree(a);CHKERRQ(ierr);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case MAT_STRUCTURALLY_SYMMETRIC:
88     a->symmetric = PETSC_TRUE;
89     break;
90   case MAT_USE_SINGLE_PRECISION_SOLVES:
91   default:
92     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
93     break;
94   }
95   PetscFunctionReturn(0);
96 }
97 
98 
99 /*
100      Adds diagonal pointers to sparse matrix structure.
101 */
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
105 int MatMarkDiagonal_MPIAdj(Mat A)
106 {
107   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
108   int        i,j,*diag,m = A->m,ierr;
109 
110   PetscFunctionBegin;
111   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
112   PetscLogObjectMemory(A,(m+1)*sizeof(int));
113   for (i=0; i<A->m; i++) {
114     for (j=a->i[i]; j<a->i[i+1]; j++) {
115       if (a->j[j] == i) {
116         diag[i] = j;
117         break;
118       }
119     }
120   }
121   a->diag = diag;
122   PetscFunctionReturn(0);
123 }
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "MatGetRow_MPIAdj"
127 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
128 {
129   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
130   int        *itmp;
131 
132   PetscFunctionBegin;
133   row -= a->rstart;
134 
135   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
136 
137   *nz = a->i[row+1] - a->i[row];
138   if (v) *v = PETSC_NULL;
139   if (idx) {
140     itmp = a->j + a->i[row];
141     if (*nz) {
142       *idx = itmp;
143     }
144     else *idx = 0;
145   }
146   PetscFunctionReturn(0);
147 }
148 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj().  Nothing to
   release: MatGetRow_MPIAdj() hands out pointers into the matrix's own
   storage, so this is a no-op.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
156 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices are always stored with
   block size 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
165 
166 
167 #undef __FUNCT__
168 #define __FUNCT__ "MatEqual_MPIAdj"
169 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
170 {
171   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
172   int         ierr;
173   PetscTruth  flag;
174 
175   PetscFunctionBegin;
176   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
177   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
178 
179   /* If the  matrix dimensions are not equal,or no of nonzeros */
180   if ((A->m != B->m) ||(a->nz != b->nz)) {
181     flag = PETSC_FALSE;
182   }
183 
184   /* if the a->i are the same */
185   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
186 
187   /* if a->j are the same */
188   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
189 
190   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
191   PetscFunctionReturn(0);
192 }
193 
194 #undef __FUNCT__
195 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
196 int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
197 {
198   int        ierr,size,i;
199   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
200 
201   PetscFunctionBegin;
202   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
203   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
204   *m    = A->m;
205   *ia   = a->i;
206   *ja   = a->j;
207   *done = PETSC_TRUE;
208   if (oshift) {
209     for (i=0; i<(*ia)[*m]; i++) {
210       (*ja)[i]++;
211     }
212     for (i=0; i<=(*m); i++) (*ia)[i]++;
213   }
214   PetscFunctionReturn(0);
215 }
216 
217 #undef __FUNCT__
218 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
219 int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
220 {
221   int        i;
222   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
223 
224   PetscFunctionBegin;
225   if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
226   if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
227   if (oshift) {
228     for (i=0; i<=(*m); i++) (*ia)[i]--;
229     for (i=0; i<(*ia)[*m]; i++) {
230       (*ja)[i]--;
231     }
232   }
233   PetscFunctionReturn(0);
234 }
235 
/* -------------------------------------------------------------------*/
/*
   Function table for the MPIAdj matrix type.  The initializer is
   POSITIONAL: each slot corresponds to one member of struct _MatOps
   (declared elsewhere), and a 0 marks an operation this type does not
   implement -- most of them, since MPIAdj stores only adjacency
   structure, not numerical values.  Do not reorder entries.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};
301 
302 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor invoked via MatSetType(); installs
   the function table, allocates and zeroes the Mat_MPIAdj header, and
   computes the row-ownership ranges.  The i/j adjacency arrays are
   supplied later by MatMPIAdjSetPreallocation().
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count = sum of local row counts over the communicator */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p]..rowners[p+1] becomes the global row range owned by process p */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* prefix sum; rowners[1] needs no update since rowners[0] == 0 */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END
346 
347 #undef __FUNCT__
348 #define __FUNCT__ "MatMPIAdjSetPreallocation"
349 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
350 {
351   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
352   int        ierr;
353 #if defined(PETSC_USE_BOPT_g)
354   int        ii;
355 #endif
356 
357   PetscFunctionBegin;
358   B->preallocated = PETSC_TRUE;
359 #if defined(PETSC_USE_BOPT_g)
360   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
361   for (ii=1; ii<B->m; ii++) {
362     if (i[ii] < 0 || i[ii] < i[ii-1]) {
363       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
364     }
365   }
366   for (ii=0; ii<i[B->m]; ii++) {
367     if (j[ii] < 0 || j[ii] >= B->N) {
368       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
369     }
370   }
371 #endif
372 
373   b->j      = j;
374   b->i      = i;
375   b->values = values;
376 
377   b->nz               = i[B->m];
378   b->diag             = 0;
379   b->symmetric        = PETSC_FALSE;
380   b->freeaij          = PETSC_TRUE;
381 
382   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
383   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed. And you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
431 
432 EXTERN_C_BEGIN
433 #undef __FUNCT__
434 #define __FUNCT__ "MatConvertTo_MPIAdj"
435 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
436 {
437   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
438   PetscScalar  *ra;
439   MPI_Comm     comm;
440 
441   PetscFunctionBegin;
442   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
443   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
444   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
445 
446   /* count the number of nonzeros per row */
447   for (i=0; i<m; i++) {
448     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
449     for (j=0; j<len; j++) {
450       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
451     }
452     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
453     nzeros += len;
454   }
455 
456   /* malloc space for nonzeros */
457   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
458   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
459   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
460 
461   nzeros = 0;
462   ia[0]  = 0;
463   for (i=0; i<m; i++) {
464     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
465     cnt     = 0;
466     for (j=0; j<len; j++) {
467       if (rj[j] != i+rstart) { /* if not diagonal */
468         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
469         ja[nzeros+cnt++] = rj[j];
470       }
471     }
472     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
473     nzeros += cnt;
474     ia[i+1] = nzeros;
475   }
476 
477   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
478   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
479 
480   PetscFunctionReturn(0);
481 }
482 EXTERN_C_END
483 
484 
485 
486 
487 
488