static char help[] = "Tests MatCreateSubMatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

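/*
   ISGetSeqIS_SameColDist_Private - standalone test version of the MatCreateSubMatrix()
   helper for the case where iscol has the same parallel distribution as the columns of
   mat. On each process it builds sequential index sets
     isrow_d, iscol_d - the selected rows/columns owned locally, in local numbering;
     iscol_o          - the selected columns that fall in the off-diagonal block B,
                        in B's local column numbering;
     garray           - for each entry of iscol_o, its column index in the submatrix
                        (allocated here; the caller must PetscFree() it).
*/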
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat,IS isrow,IS iscol,IS *isrow_d,IS *iscol_d,IS *iscol_o,const PetscInt *garray[])
{
  Vec            x,cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray,*cmaparray;
  PetscInt       ncols,isstart,*idx,m,rstart,count;
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)mat->data;
  Mat            B=a->B;
  Vec            lvec=a->lvec,lcmap;
  PetscInt       i,cstart,cend,Bn=B->cmap->N;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  VecScatter     Mvctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat,&comm));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(ISGetLocalSize(iscol,&ncols));

  /* (1) iscol is a sub-column vector of mat; pad it with -1 to form full-length vectors x and cmap */
  PetscCall(MatCreateVecs(mat,&x,NULL));
  PetscCall(VecDuplicate(x,&cmap));
  PetscCall(VecSet(x,-1.0));
  PetscCall(VecSet(cmap,-1.0));

  PetscCall(VecDuplicate(lvec,&lcmap));
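
  /* x will mark each selected column with its global column index (unselected entries
     stay at -1); cmap will carry that column's new global index in the submatrix */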
  /* Get the starting index of this process's piece of iscol: MPI_Scan() returns an
     inclusive prefix sum, so subtract the local count to get the exclusive offset */
  PetscCallMPI(MPI_Scan(&ncols,&isstart,1,MPIU_INT,MPI_SUM,comm));
  isstart -= ncols;
  PetscCall(MatGetOwnershipRangeColumn(mat,&cstart,&cend));

  PetscCall(ISGetIndices(iscol,&is_idx));
  PetscCall(VecGetArray(x,&xarray));
  PetscCall(VecGetArray(cmap,&cmaparray));
  PetscCall(PetscMalloc1(ncols,&idx));
  for (i=0; i<ncols; i++) {
    xarray[is_idx[i]-cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i]-cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] in the submatrix */
    idx[i]                      = is_idx[i]-cstart;           /* local index of iscol[i] */
  }
  PetscCall(VecRestoreArray(x,&xarray));
  PetscCall(VecRestoreArray(cmap,&cmaparray));
  PetscCall(ISRestoreIndices(iscol,&is_idx));

  /* Get iscol_d */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,iscol_d));
  PetscCall(ISGetBlockSize(iscol,&i));
  PetscCall(ISSetBlockSize(*iscol_d,i));

  /* Get isrow_d */
  PetscCall(ISGetLocalSize(isrow,&m));
  rstart = mat->rmap->rstart;
  PetscCall(PetscMalloc1(m,&idx));
  PetscCall(ISGetIndices(isrow,&is_idx));
  for (i=0; i<m; i++) idx[i] = is_idx[i]-rstart;
  PetscCall(ISRestoreIndices(isrow,&is_idx));

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,isrow_d));
  PetscCall(ISGetBlockSize(isrow,&i));
  PetscCall(ISSetBlockSize(*isrow_d,i));

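  /* isrow_d and iscol_d address the local diagonal block; the selected columns that fall
     in the off-diagonal block B are identified next by scattering x and cmap to ghost space */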
  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    PetscCall(MatSetUpMultiply_MPIAIJ(mat));
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  PetscCall(VecScatterBegin(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD));

  PetscCall(VecScatterBegin(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD));
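
  /* After the scatters: for each ghost column i of B, lvec[i] > -1 exactly when the
     corresponding global column of mat was selected in iscol, and lcmap[i] then holds
     that column's new global index in the submatrix */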

  /* (3) create sequential iscol_o (a subset of iscol) and garray */
  /* off-process column indices */
  PetscInt *cmap1;
  count = 0;
  PetscCall(PetscMalloc1(Bn,&idx));
  PetscCall(PetscMalloc1(Bn,&cmap1));

  PetscCall(VecGetArray(lvec,&xarray));
  PetscCall(VecGetArray(lcmap,&cmaparray));
  for (i=0; i<Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                     /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)PetscRealPart(cmaparray[i]); /* column index in submat */
      count++;
    }
  }
  printf("[%d] Bn %" PetscInt_FMT ", count %" PetscInt_FMT "\n",rank,Bn,count);
  PetscCall(VecRestoreArray(lvec,&xarray));
  PetscCall(VecRestoreArray(lcmap,&cmaparray));
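  /* Sanity check below: with the default sizes (m=3, n=2*size) and every second column
     selected, each process should see 3*(size-1) selected off-process columns, so the
     hard-wired 6 assumes a run on 3 MPI ranks */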
  if (count != 6) {
    printf("[%d] count %" PetscInt_FMT " != 6 lvec:\n",rank,count);
    PetscCall(VecView(lvec,NULL));

    printf("[%d] count %" PetscInt_FMT " != 6 lcmap:\n",rank,count);
    PetscCall(VecView(lcmap,NULL));
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"count %" PetscInt_FMT " != 6",count);
  }

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count,idx,PETSC_COPY_VALUES,iscol_o));
  /* cannot ensure iscol_o has the same block size as iscol! */

  PetscCall(PetscFree(idx));

  *garray = cmap1; /* caller takes ownership and must PetscFree() it */

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&cmap));
  PetscCall(VecDestroy(&lcmap));
  PetscFunctionReturn(0);
}

int main(int argc,char **args)
{
  Mat         C,A;
  PetscInt    i,j,m = 3,n = 2,rstart,rend;
  PetscMPIInt size,rank;
  PetscScalar v;
  IS          isrow,iscol;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n    = 2*size;

  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /*
     This is JUST to generate a nice test matrix: each process fills all
     columns of its own rows, so C ends up dense. This is not something
     one would ever do in practice.
  */
  PetscCall(MatGetOwnershipRange(C,&rstart,&rend));
  for (i=rstart; i<rend; i++) {
    for (j=0; j<m*n; j++) {
      v    = i + j + 1;
      PetscCall(MatSetValues(C,1,&i,1,&j,&v,INSERT_VALUES));
    }
  }

  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  PetscCall(MatGetOwnershipRange(C,&rstart,&rend));
  /* Create parallel IS with the rows we want on THIS process */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&isrow));
  /* Create parallel IS with the columns we want on THIS process (same as the rows for now) */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&iscol));

  IS             iscol_d,isrow_d,iscol_o;
  const PetscInt *garray;
  PetscCall(ISGetSeqIS_SameColDist_Private(C,isrow,iscol,&isrow_d,&iscol_d,&iscol_o,&garray));

  PetscCall(ISDestroy(&isrow_d));
  PetscCall(ISDestroy(&iscol_d));
  PetscCall(ISDestroy(&iscol_o));
  PetscCall(PetscFree(garray));

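  /* Extract the submatrix once, then call MatCreateSubMatrix() again with MAT_REUSE_MATRIX
     to exercise the reuse code path */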
  PetscCall(MatCreateSubMatrix(C,isrow,iscol,MAT_INITIAL_MATRIX,&A));
  PetscCall(MatCreateSubMatrix(C,isrow,iscol,MAT_REUSE_MATRIX,&A));

  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}