xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 73f4d3771d9e6ab3f04055eab794d7609818b9d3)
1 /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNCT__
58 #define __FUNCT__ "MatDestroy_MPIAdj"
59 int MatDestroy_MPIAdj(Mat mat)
60 {
61   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
62   int        ierr;
63 
64   PetscFunctionBegin;
65 #if defined(PETSC_USE_LOG)
66   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
67 #endif
68   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
69   if (a->freeaij) {
70     ierr = PetscFree(a->i);CHKERRQ(ierr);
71     ierr = PetscFree(a->j);CHKERRQ(ierr);
72     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
73   }
74   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
75   ierr = PetscFree(a);CHKERRQ(ierr);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case MAT_STRUCTURALLY_SYMMETRIC:
88     a->symmetric = PETSC_TRUE;
89     break;
90   case MAT_USE_SINGLE_PRECISION_SOLVES:
91   default:
92     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
93     break;
94   }
95   PetscFunctionReturn(0);
96 }
97 
98 
99 /*
100      Adds diagonal pointers to sparse matrix structure.
101 */
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
105 int MatMarkDiagonal_MPIAdj(Mat A)
106 {
107   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
108   int        i,j,*diag,m = A->m,ierr;
109 
110   PetscFunctionBegin;
111   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
112   PetscLogObjectMemory(A,(m+1)*sizeof(int));
113   for (i=0; i<A->m; i++) {
114     for (j=a->i[i]; j<a->i[i+1]; j++) {
115       if (a->j[j] == i) {
116         diag[i] = j;
117         break;
118       }
119     }
120   }
121   a->diag = diag;
122   PetscFunctionReturn(0);
123 }
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "MatGetOwnershipRange_MPIAdj"
127 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
128 {
129   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
130   PetscFunctionBegin;
131   if (m) *m = a->rstart;
132   if (n) *n = a->rend;
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "MatGetRow_MPIAdj"
138 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141   int        *itmp;
142 
143   PetscFunctionBegin;
144   row -= a->rstart;
145 
146   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147 
148   *nz = a->i[row+1] - a->i[row];
149   if (v) *v = PETSC_NULL;
150   if (idx) {
151     itmp = a->j + a->i[row];
152     if (*nz) {
153       *idx = itmp;
154     }
155     else *idx = 0;
156   }
157   PetscFunctionReturn(0);
158 }
159 
#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - No-op: MatGetRow_MPIAdj() hands out pointers into
    the matrix's own arrays, so there is nothing to release here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
167 
#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
/* Adjacency matrices always report a block size of 1. */
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
176 
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "MatEqual_MPIAdj"
180 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
181 {
182   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
183   int         ierr;
184   PetscTruth  flag;
185 
186   PetscFunctionBegin;
187   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
188   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
189 
190   /* If the  matrix dimensions are not equal,or no of nonzeros */
191   if ((A->m != B->m) ||(a->nz != b->nz)) {
192     flag = PETSC_FALSE;
193   }
194 
195   /* if the a->i are the same */
196   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
197 
198   /* if a->j are the same */
199   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
200 
201   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
202   PetscFunctionReturn(0);
203 }
204 
205 #undef __FUNCT__
206 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
207 int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
208 {
209   int        ierr,size,i;
210   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
211 
212   PetscFunctionBegin;
213   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
214   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
215   *m    = A->m;
216   *ia   = a->i;
217   *ja   = a->j;
218   *done = PETSC_TRUE;
219   if (oshift) {
220     for (i=0; i<(*ia)[*m]; i++) {
221       (*ja)[i]++;
222     }
223     for (i=0; i<=(*m); i++) (*ia)[i]++;
224   }
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
230 int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
231 {
232   int        i;
233   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
234 
235   PetscFunctionBegin;
236   if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
237   if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
238   if (oshift) {
239     for (i=0; i<=(*m); i++) (*ia)[i]--;
240     for (i=0; i<(*ia)[*m]; i++) {
241       (*ja)[i]--;
242     }
243   }
244   PetscFunctionReturn(0);
245 }
246 
/* -------------------------------------------------------------------*/
/*
    Function table for MATMPIADJ.  Entries are positional and must line up
    with struct _MatOps; a 0 slot means the operation is not supported by
    this matrix type.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};
314 
315 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor invoked via MatSetType(B,MATMPIADJ).

   Installs the MPIAdj function table, derives the global row count from the
   local counts, and builds the rowners[] ownership table as prefix sums of
   the per-process local row counts.
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;  /* local column count is set to the global count (see map note below) */

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  /* gather every process's local row count into rowners[1..size] ... */
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* ... and convert to prefix sums; rowners[1] is already correct since rowners[0]=0 */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END
359 
360 #undef __FUNCT__
361 #define __FUNCT__ "MatMPIAdjSetPreallocation"
362 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
363 {
364   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
365   int        ierr;
366 #if defined(PETSC_USE_BOPT_g)
367   int        ii;
368 #endif
369 
370   PetscFunctionBegin;
371   B->preallocated = PETSC_TRUE;
372 #if defined(PETSC_USE_BOPT_g)
373   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
374   for (ii=1; ii<B->m; ii++) {
375     if (i[ii] < 0 || i[ii] < i[ii-1]) {
376       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
377     }
378   }
379   for (ii=0; ii<i[B->m]; ii++) {
380     if (j[ii] < 0 || j[ii] >= B->N) {
381       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
382     }
383   }
384 #endif
385 
386   b->j      = j;
387   b->i      = i;
388   b->values = values;
389 
390   b->nz               = i[B->m];
391   b->diag             = 0;
392   b->symmetric        = PETSC_FALSE;
393   b->freeaij          = PETSC_TRUE;
394 
395   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
396   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
397   PetscFunctionReturn(0);
398 }
399 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* local rows m, local/global columns n, global rows determined from the sum of m */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
444 
445 EXTERN_C_BEGIN
446 #undef __FUNCT__
447 #define __FUNCT__ "MatConvertTo_MPIAdj"
448 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
449 {
450   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
451   PetscScalar  *ra;
452   MPI_Comm     comm;
453 
454   PetscFunctionBegin;
455   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
456   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
457   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
458 
459   /* count the number of nonzeros per row */
460   for (i=0; i<m; i++) {
461     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
462     for (j=0; j<len; j++) {
463       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
464     }
465     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
466     nzeros += len;
467   }
468 
469   /* malloc space for nonzeros */
470   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
471   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
472   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
473 
474   nzeros = 0;
475   ia[0]  = 0;
476   for (i=0; i<m; i++) {
477     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
478     cnt     = 0;
479     for (j=0; j<len; j++) {
480       if (rj[j] != i+rstart) { /* if not diagonal */
481         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
482         ja[nzeros+cnt++] = rj[j];
483       }
484     }
485     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
486     nzeros += cnt;
487     ia[i+1] = nzeros;
488   }
489 
490   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
491   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
492 
493   PetscFunctionReturn(0);
494 }
495 EXTERN_C_END
496 
497 
498 
499 
500 
501