xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 77e54ba9d2b3f34b049ed8d5734cc409f715452e)
1 /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
2 
3 /*
4     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5 */
6 #include "src/mat/impls/adj/mpi/mpiadj.h"
7 #include "petscsys.h"
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "MatView_MPIAdj_ASCII"
11 int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
12 {
13   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
14   int               ierr,i,j,m = A->m;
15   char              *name;
16   PetscViewerFormat format;
17 
18   PetscFunctionBegin;
19   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
20   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
21   if (format == PETSC_VIEWER_ASCII_INFO) {
22     PetscFunctionReturn(0);
23   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
24     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
25   } else {
26     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
35   }
36   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 #undef __FUNCT__
41 #define __FUNCT__ "MatView_MPIAdj"
42 int MatView_MPIAdj(Mat A,PetscViewer viewer)
43 {
44   int        ierr;
45   PetscTruth isascii;
46 
47   PetscFunctionBegin;
48   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
49   if (isascii) {
50     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
51   } else {
52     SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
53   }
54   PetscFunctionReturn(0);
55 }
56 
57 #undef __FUNCT__
58 #define __FUNCT__ "MatDestroy_MPIAdj"
/*
    MatDestroy_MPIAdj - releases all storage held by an MPIAdj matrix.
*/
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  /* diag is always allocated by the matrix itself (MatMarkDiagonal_MPIAdj) */
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  /* i/j/values are freed only when the matrix owns them; freeaij is set by
     MatMPIAdjSetPreallocation_MPIAdj when ownership is transferred */
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "MatSetOption_MPIAdj"
81 int MatSetOption_MPIAdj(Mat A,MatOption op)
82 {
83   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
84 
85   PetscFunctionBegin;
86   switch (op) {
87   case MAT_SYMMETRIC:
88     break;
89   case MAT_STRUCTURALLY_SYMMETRIC:
90     a->symmetric = PETSC_TRUE;
91     break;
92   default:
93     PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
94     break;
95   }
96   PetscFunctionReturn(0);
97 }
98 
99 
100 /*
101      Adds diagonal pointers to sparse matrix structure.
102 */
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
106 int MatMarkDiagonal_MPIAdj(Mat A)
107 {
108   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
109   int        i,j,*diag,m = A->m,ierr;
110 
111   PetscFunctionBegin;
112   ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
113   PetscLogObjectMemory(A,(m+1)*sizeof(int));
114   for (i=0; i<A->m; i++) {
115     for (j=a->i[i]; j<a->i[i+1]; j++) {
116       if (a->j[j] == i) {
117         diag[i] = j;
118         break;
119       }
120     }
121   }
122   a->diag = diag;
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "MatGetRow_MPIAdj"
128 int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
129 {
130   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
131   int        *itmp;
132 
133   PetscFunctionBegin;
134   row -= a->rstart;
135 
136   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
137 
138   *nz = a->i[row+1] - a->i[row];
139   if (v) *v = PETSC_NULL;
140   if (idx) {
141     itmp = a->j + a->i[row];
142     if (*nz) {
143       *idx = itmp;
144     }
145     else *idx = 0;
146   }
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
    MatRestoreRow_MPIAdj - intentionally a no-op: MatGetRow_MPIAdj hands out
    pointers into the matrix storage and allocates nothing, so there is
    nothing to release.
*/
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
157 
158 #undef __FUNCT__
159 #define __FUNCT__ "MatGetBlockSize_MPIAdj"
/*
    MatGetBlockSize_MPIAdj - adjacency matrices are stored point-wise, so
    the block size is always 1.
*/
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}
166 
167 
168 #undef __FUNCT__
169 #define __FUNCT__ "MatEqual_MPIAdj"
170 int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
171 {
172   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
173   int         ierr;
174   PetscTruth  flag;
175 
176   PetscFunctionBegin;
177   /* If the  matrix dimensions are not equal,or no of nonzeros */
178   if ((A->m != B->m) ||(a->nz != b->nz)) {
179     flag = PETSC_FALSE;
180   }
181 
182   /* if the a->i are the same */
183   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
184 
185   /* if a->j are the same */
186   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
187 
188   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
189   PetscFunctionReturn(0);
190 }
191 
192 #undef __FUNCT__
193 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
/*
    MatGetRowIJ_MPIAdj - exposes the CSR row-pointer/column-index arrays.
    Only the uniprocessor case is supported; with more than one process
    *done is set to PETSC_FALSE and nothing else is returned.  The returned
    ia/ja point into the matrix storage itself (no copy), so an oshift
    conversion mutates the matrix and must be undone by MatRestoreRowIJ.
*/
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* convert 0-based to 1-based in place; ja must be shifted first because
       the loop bound (*ia)[*m] (total nonzeros) is still unshifted here */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
214 
215 #undef __FUNCT__
216 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
/*
    MatRestoreRowIJ_MPIAdj - undoes MatGetRowIJ_MPIAdj.  Since the ia/ja
    arrays are the matrix's own storage, the only work is reversing the
    1-based index shift applied when oshift was requested.
*/
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    /* reverse order of MatGetRowIJ: ia is decremented first so that
       (*ia)[*m] is back to the true nonzero count before ja is shifted */
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
233 
234 /* -------------------------------------------------------------------*/
/*
   Function dispatch table for MATMPIADJ.  A 0 entry means the operation is
   not supported by this matrix type; the numbered comments give the slot
   index within struct _MatOps so entries can be located by position.
*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0
};
322 
323 EXTERN_C_BEGIN
324 #undef __FUNCT__
325 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
326 int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
327 {
328   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
329   int        ierr;
330 #if defined(PETSC_USE_BOPT_g)
331   int        ii;
332 #endif
333 
334   PetscFunctionBegin;
335   B->preallocated = PETSC_TRUE;
336 #if defined(PETSC_USE_BOPT_g)
337   if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
338   for (ii=1; ii<B->m; ii++) {
339     if (i[ii] < 0 || i[ii] < i[ii-1]) {
340       SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
341     }
342   }
343   for (ii=0; ii<i[B->m]; ii++) {
344     if (j[ii] < 0 || j[ii] >= B->N) {
345       SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
346     }
347   }
348 #endif
349 
350   b->j      = j;
351   b->i      = i;
352   b->values = values;
353 
354   b->nz               = i[B->m];
355   b->diag             = 0;
356   b->symmetric        = PETSC_FALSE;
357   b->freeaij          = PETSC_TRUE;
358 
359   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
360   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
361   PetscFunctionReturn(0);
362 }
363 EXTERN_C_END
364 
365 /*MC
366    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
367    intended for use constructing orderings and partitionings.
368 
369   Level: beginner
370 
.seealso: MatCreateMPIAdj()
372 M*/
373 
374 EXTERN_C_BEGIN
375 #undef __FUNCT__
376 #define __FUNCT__ "MatCreate_MPIAdj"
/*
    MatCreate_MPIAdj - type-constructor for MATMPIADJ: allocates and zeros
    the Mat_MPIAdj context, installs the operations table, determines the
    global row count and ownership ranges, and registers the preallocation
    routine as a composed function.
*/
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  /* global row count is the sum of the local row counts */
  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* rowners[p]..rowners[p+1] is the global row range owned by process p:
     gather each process's local row count, then prefix-sum.  The sum can
     start at ii=2 because rowners[1] = rowners[0] + m_0 already holds
     (rowners[0] is 0). */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
419 
420 #undef __FUNCT__
421 #define __FUNCT__ "MatMPIAdjSetPreallocation"
422 /*@C
423    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
424 
425    Collective on MPI_Comm
426 
427    Input Parameters:
428 +  A - the matrix
429 .  i - the indices into j for the start of each row
430 .  j - the column indices for each row (sorted for each row).
431        The indices in i and j start with zero (NOT with one).
432 -  values - [optional] edge weights
433 
434    Level: intermediate
435 
436 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
437 @*/
438 int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
439 {
440   int ierr,(*f)(Mat,int*,int*,int*);
441 
442   PetscFunctionBegin;
443   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
444   if (f) {
445     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
446   }
447   PetscFunctionReturn(0);
448 }
449 
450 #undef __FUNCT__
451 #define __FUNCT__ "MatCreateMPIAdj"
452 /*@C
453    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
454    The matrix does not have numerical values associated with it, but is
455    intended for ordering (to reduce bandwidth etc) and partitioning.
456 
457    Collective on MPI_Comm
458 
459    Input Parameters:
460 +  comm - MPI communicator
461 .  m - number of local rows
462 .  n - number of columns
463 .  i - the indices into j for the start of each row
464 .  j - the column indices for each row (sorted for each row).
465        The indices in i and j start with zero (NOT with one).
466 -  values -[optional] edge weights
467 
468    Output Parameter:
469 .  A - the matrix
470 
471    Level: intermediate
472 
   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
    call from Fortran you need not create the arrays with PetscMalloc().
478    Should not include the matrix diagonals.
479 
480    If you already have a matrix, you can create its adjacency matrix by a call
481    to MatConvert, specifying a type of MATMPIADJ.
482 
483    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
484 
485 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
486 @*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  /* create the matrix shell, give it the MPIAdj type, then hand over the
     caller's CSR arrays (ownership transfers to the matrix) */
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
497 
498 EXTERN_C_BEGIN
499 #undef __FUNCT__
500 #define __FUNCT__ "MatConvertTo_MPIAdj"
501 int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat) {
502   Mat          B;
503   int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
504   PetscScalar  *ra;
505   MPI_Comm     comm;
506 
507   PetscFunctionBegin;
508   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
509   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
510   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
511 
512   /* count the number of nonzeros per row */
513   for (i=0; i<m; i++) {
514     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
515     for (j=0; j<len; j++) {
516       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
517     }
518     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
519     nzeros += len;
520   }
521 
522   /* malloc space for nonzeros */
523   ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
524   ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
525   ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);
526 
527   nzeros = 0;
528   ia[0]  = 0;
529   for (i=0; i<m; i++) {
530     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
531     cnt     = 0;
532     for (j=0; j<len; j++) {
533       if (rj[j] != i+rstart) { /* if not diagonal */
534         a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
535         ja[nzeros+cnt++] = rj[j];
536       }
537     }
538     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
539     nzeros += cnt;
540     ia[i+1] = nzeros;
541   }
542 
543   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
544   ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,&B);CHKERRQ(ierr);
545 
546   /* Fake support for "inplace" convert. */
547   if (*newmat == A) {
548     ierr = MatDestroy(A);CHKERRQ(ierr);
549   }
550   *newmat = B;
551   PetscFunctionReturn(0);
552 }
553 EXTERN_C_END
554 
555 
556 
557 
558 
559