xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision b5758dff40dc4232f7fa9713b5481f7e0e0d5d24)
1 /*$Id: mpiadj.c,v 1.58 2001/01/20 03:34:58 bsmith Exp bsmith $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "petscsys.h"
7 #include "src/mat/impls/adj/mpi/mpiadj.h"
8 
9 #undef __FUNC__
10 #define __FUNC__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)viewer,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNC__
41 #define __FUNC__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNC__
58 #define __FUNC__ "MatDestroy_MPIAdj"
59 int MatDestroy_MPIAdj(Mat mat)
60 {
61   Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
62   int        ierr;
63 
64   PetscFunctionBegin;
65 #if defined(PETSC_USE_LOG)
66   PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
67 #endif
68   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
69   if (a->freeaij) {
70     ierr = PetscFree(a->i);CHKERRQ(ierr);
71     ierr = PetscFree(a->j);CHKERRQ(ierr);
72     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
73   }
74   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
75   ierr = PetscFree(a);CHKERRQ(ierr);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNC__
80 #define __FUNC__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   if (op == MAT_STRUCTURALLY_SYMMETRIC) {
87     a->symmetric = PETSC_TRUE;
88   } else {
89     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
90   }
91   PetscFunctionReturn(0);
92 }
93 
94 
95 /*
96      Adds diagonal pointers to sparse matrix structure.
97 */
98 
99 #undef __FUNC__
100 #define __FUNC__ "MatMarkDiagonal_MPIAdj"
101 int MatMarkDiagonal_MPIAdj(Mat A)
102 {
103   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
104   int        i,j,*diag,m = A->m,ierr;
105 
106   PetscFunctionBegin;
107   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
108   PetscLogObjectMemory(A,(m+1)*sizeof(int));
109   for (i=0; i<A->m; i++) {
110     for (j=a->i[i]; j<a->i[i+1]; j++) {
111       if (a->j[j] == i) {
112         diag[i] = j;
113         break;
114       }
115     }
116   }
117   a->diag = diag;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNC__
122 #define __FUNC__ "MatGetOwnershipRange_MPIAdj"
123 int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
124 {
125   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
126   PetscFunctionBegin;
127   if (m) *m = a->rstart;
128   if (n) *n = a->rend;
129   PetscFunctionReturn(0);
130 }
131 
132 #undef __FUNC__
133 #define __FUNC__ "MatGetRow_MPIAdj"
134 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
135 {
136   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
137   int        *itmp;
138 
139   PetscFunctionBegin;
140   row -= a->rstart;
141 
142   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
143 
144   *nz = a->i[row+1] - a->i[row];
145   if (v) *v = PETSC_NULL;
146   if (idx) {
147     itmp = a->j + a->i[row];
148     if (*nz) {
149       *idx = itmp;
150     }
151     else *idx = 0;
152   }
153   PetscFunctionReturn(0);
154 }
155 
#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Companion to MatGetRow_MPIAdj(), which hands out
   pointers directly into the matrix storage (no copies), so there is
   nothing to release here.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
163 
#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
/*
   MatGetBlockSize_MPIAdj - Adjacency matrices are unblocked; the block
   size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
172 
173 
174 #undef __FUNC__
175 #define __FUNC__ "MatEqual_MPIAdj"
176 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
177 {
178   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
179   int         ierr;
180   PetscTruth  flag;
181 
182   PetscFunctionBegin;
183   ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
184   if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
185 
186   /* If the  matrix dimensions are not equal,or no of nonzeros */
187   if ((A->m != B->m) ||(a->nz != b->nz)) {
188     flag = PETSC_FALSE;
189   }
190 
191   /* if the a->i are the same */
192   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
193 
194   /* if a->j are the same */
195   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
196 
197   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
198   PetscFunctionReturn(0);
199 }
200 
#undef __FUNC__
#define __FUNC__ "MatGetRowIJ_MPIAdj"
/*
   MatGetRowIJ_MPIAdj - Returns pointers to the internal compressed-row
   arrays (no copy is made).  Only supported on a single process; with
   more than one process *done is set to PETSC_FALSE and nothing is
   returned.

   If oshift is nonzero, the arrays are shifted IN PLACE to 1-based
   (Fortran-style) indexing; MatRestoreRowIJ_MPIAdj() undoes the shift.
*/
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* order matters: ja's length (*ia)[*m] must be read BEFORE ia is shifted */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
223 
#undef __FUNC__
#define __FUNC__ "MatRestoreRowIJ_MPIAdj"
/*
   MatRestoreRowIJ_MPIAdj - Undoes MatGetRowIJ_MPIAdj(): verifies the
   caller handed back the same internal arrays and, if oshift was used,
   shifts them back to 0-based indexing in place.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* order matters (mirror of GetRowIJ): un-shift ia FIRST so that
       (*ia)[*m] is the true nonzero count for the ja loop below */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
242 
/* -------------------------------------------------------------------*/
/*
   Function table for the MATMPIADJ matrix type.  Entries are POSITIONAL,
   matching the slot order of struct _MatOps; a 0 entry means that
   operation is not supported by this matrix type.  Do not reorder.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};
310 
311 
EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ "MatCreate_MPIAdj"
/*
   MatCreate_MPIAdj - Type constructor registered for MATMPIADJ.  Installs
   the function table, computes the global row count, builds the row
   ownership table (rowners) and the row/column maps.  The i/j arrays are
   supplied later by MatMPIAdjSetPreallocation().
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count = sum of the local row counts over the communicator */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  /* NOTE(review): assumes the global column count B->N was already set by
     MatCreate(); local columns are simply taken equal to it -- confirm */
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* gather every process's local row count into rowners[1..size] */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  /* prefix sum; starts at 2 because rowners[1] (rank 0's count, offset by
     rowners[0] == 0) is already its own partial sum */
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END
355 
356 #undef __FUNC__
357 #define __FUNC__ "MatMPIAdjSetPreallocation"
358 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
359 {
360   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
361   int        ierr;
362 #if defined(PETSC_USE_BOPT_g)
363   int        ii;
364 #endif
365 
366   PetscFunctionBegin;
367   B->preallocated = PETSC_TRUE;
368 #if defined(PETSC_USE_BOPT_g)
369   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
370   for (ii=1; ii<B->m; ii++) {
371     if (i[ii] < 0 || i[ii] < i[ii-1]) {
372       SETERRQ4(1,"i[%d] index is out of range: i[%d]",ii,i[ii],ii-1,i[ii-1]);
373     }
374   }
375   for (ii=0; ii<i[B->m]; ii++) {
376     if (j[ii] < 0 || j[ii] >= B->N) {
377       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
378     }
379   }
380 #endif
381 
382   b->j      = j;
383   b->i      = i;
384   b->values = values;
385 
386   b->nz               = i[B->m];
387   b->diag             = 0;
388   b->symmetric        = PETSC_FALSE;
389   b->freeaij          = PETSC_TRUE;
390 
391   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
392   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395 
#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values -[optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, values and j arrays yourself. PETSc will free them
   when the matrix is destroyed. And you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
   Should not include the matrix diagonals.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
440 
441 EXTERN_C_BEGIN
442 #undef __FUNC__
443 #define __FUNC__ "MatConvertTo_MPIAdj"
444 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
445 {
446   int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
447   Scalar   *ra;
448   MPI_Comm comm;
449 
450   PetscFunctionBegin;
451   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
452   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
453   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
454 
455   /* count the number of nonzeros per row */
456   for (i=0; i<m; i++) {
457     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
458     for (j=0; j<len; j++) {
459       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
460     }
461     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
462     nzeros += len;
463   }
464 
465   /* malloc space for nonzeros */
466   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
467   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
468   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
469 
470   nzeros = 0;
471   ia[0]  = 0;
472   for (i=0; i<m; i++) {
473     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
474     cnt     = 0;
475     for (j=0; j<len; j++) {
476       if (rj[j] != i+rstart) { /* if not diagonal */
477         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
478         ja[nzeros+cnt++] = rj[j];
479       }
480     }
481     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
482     nzeros += cnt;
483     ia[i+1] = nzeros;
484   }
485 
486   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
487   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);
488 
489   PetscFunctionReturn(0);
490 }
491 EXTERN_C_END
492 
493 
494 
495 
496 
497