
static char help[] = "Tests MatCreateSubMatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

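/*
   ISGetSeqIS_SameColDist_Private - Given parallel index sets isrow/iscol whose column
   distribution matches that of mat, build the sequential index sets needed to extract
   a submatrix:
     isrow_d, iscol_d - selected rows/columns that fall in this process's diagonal block
     iscol_o          - selected columns that fall in the off-diagonal block B
     garray           - for each entry of iscol_o, the corresponding column index in the
                        submatrix; the caller takes ownership and must free it
*/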
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat,IS isrow,IS iscol,IS *isrow_d,IS *iscol_d,IS *iscol_o,const PetscInt *garray[])
{
  Vec            x,cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray,*cmaparray;
  PetscInt       ncols,isstart,*idx,m,rstart,count;
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)mat->data;
  Mat            B=a->B;
  Vec            lvec=a->lvec,lcmap;
  PetscInt       i,cstart,cend,Bn=B->cmap->N;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  VecScatter     Mvctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat,&comm));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(ISGetLocalSize(iscol,&ncols));

  /* (1) iscol is a sub-column vector of mat, pad it with '-1.' to form a full vector x */
  PetscCall(MatCreateVecs(mat,&x,NULL));
  PetscCall(VecDuplicate(x,&cmap));
  PetscCall(VecSet(x,-1.0));
  PetscCall(VecSet(cmap,-1.0));

  PetscCall(VecDuplicate(lvec,&lcmap));

  /* Get the global starting index of this process's portion of iscol (exclusive prefix sum of the local sizes) */
  PetscCallMPI(MPI_Scan(&ncols,&isstart,1,MPIU_INT,MPI_SUM,comm));
  isstart -= ncols;
  PetscCall(MatGetOwnershipRangeColumn(mat,&cstart,&cend));

  PetscCall(ISGetIndices(iscol,&is_idx));
  PetscCall(VecGetArray(x,&xarray));
  PetscCall(VecGetArray(cmap,&cmaparray));
  PetscCall(PetscMalloc1(ncols,&idx));
  for (i=0; i<ncols; i++) {
    xarray[is_idx[i]-cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i]-cstart] = (PetscScalar)(i + isstart);      /* global index of iscol[i] */
    idx[i]                      = is_idx[i]-cstart; /* local index of iscol[i]  */
  }
  PetscCall(VecRestoreArray(x,&xarray));
  PetscCall(VecRestoreArray(cmap,&cmaparray));
  PetscCall(ISRestoreIndices(iscol,&is_idx));

  /* Get iscol_d */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,iscol_d));
  PetscCall(ISGetBlockSize(iscol,&i));
  PetscCall(ISSetBlockSize(*iscol_d,i));

  /* Get isrow_d */
  PetscCall(ISGetLocalSize(isrow,&m));
  rstart = mat->rmap->rstart;
  PetscCall(PetscMalloc1(m,&idx));
  PetscCall(ISGetIndices(isrow,&is_idx));
  for (i=0; i<m; i++) idx[i] = is_idx[i]-rstart;
  PetscCall(ISRestoreIndices(isrow,&is_idx));

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,isrow_d));
  PetscCall(ISGetBlockSize(isrow,&i));
  PetscCall(ISSetBlockSize(*isrow_d,i));

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    PetscCall(MatSetUpMultiply_MPIAIJ(mat));
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  PetscCall(VecScatterBegin(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx,x,lvec,INSERT_VALUES,SCATTER_FORWARD));

  PetscCall(VecScatterBegin(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx,cmap,lcmap,INSERT_VALUES,SCATTER_FORWARD));

  /* (3) create sequential iscol_o (a subset of iscol) and isgarray */
  /* off-process column indices */
  count = 0;
  PetscInt *cmap1;
  PetscCall(PetscMalloc1(Bn,&idx));
  PetscCall(PetscMalloc1(Bn,&cmap1));

  PetscCall(VecGetArray(lvec,&xarray));
  PetscCall(VecGetArray(lcmap,&cmaparray));
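  /* lvec/lcmap now hold the ghost values for the off-diagonal columns of B: entries still
     equal to -1 were not selected by iscol; entries > -1 carry the original global column
     index (in lvec) and the submatrix column index (in lcmap) */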
  for (i=0; i<Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                   /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)(PetscRealPart(cmaparray[i]));  /* column index in submat */
      count++;
    }
  }
  printf("[%d] Bn %" PetscInt_FMT ", count %" PetscInt_FMT "\n",rank,Bn,count);
  PetscCall(VecRestoreArray(lvec,&xarray));
  PetscCall(VecRestoreArray(lcmap,&cmaparray));
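  /* The expected count of 6 is hard-wired for the run configuration this test targets;
     other configurations trip the error below and dump the scattered vectors */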
  if (count != 6) {
    printf("[%d] count %" PetscInt_FMT " != 6 lvec:\n",rank,count);
    PetscCall(VecView(lvec,NULL));

    printf("[%d] count %" PetscInt_FMT " != 6 lcmap:\n",rank,count);
    PetscCall(VecView(lcmap,NULL));
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"count %" PetscInt_FMT " != 6",count);
  }

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF,count,idx,PETSC_COPY_VALUES,iscol_o));
  /* cannot ensure iscol_o has same blocksize as iscol! */

  PetscCall(PetscFree(idx));

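  /* Return the submatrix column indices of the off-process selection; the caller takes ownership of cmap1 */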
  *garray = cmap1;

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&cmap));
  PetscCall(VecDestroy(&lcmap));
  PetscFunctionReturn(0);
}

int main(int argc,char **args)
{
  Mat            C,A;
  PetscInt       i,j,m = 3,n = 2,rstart,rend;
  PetscMPIInt    size,rank;
  PetscScalar    v;
  IS             isrow,iscol;

  PetscCall(PetscInitialize(&argc,&args,(char*)0,help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  n    = 2*size;

  PetscCall(MatCreate(PETSC_COMM_WORLD,&C));
  PetscCall(MatSetSizes(C,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /*
     This is JUST to generate a nice test matrix: each process fills all columns of the
     rows it owns. This is not something one would ever do in practice.
  */
  PetscCall(MatGetOwnershipRange(C,&rstart,&rend));
  for (i=rstart; i<rend; i++) {
    for (j=0; j<m*n; j++) {
      v    = i + j + 1;
      PetscCall(MatSetValues(C,1,&i,1,&j,&v,INSERT_VALUES));
    }
  }

  PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  PetscCall(MatGetOwnershipRange(C,&rstart,&rend));
  /* Create parallel IS with the rows we want on THIS processor */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&isrow));
  /* Create parallel IS with the columns we want on THIS processor (same as the rows for now) */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD,(rend-rstart)/2,rstart,2,&iscol));

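  /* Call the helper above directly to build the sequential index sets and the off-process column map for this selection */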
  IS             iscol_d,isrow_d,iscol_o;
  const PetscInt *garray;
  PetscCall(ISGetSeqIS_SameColDist_Private(C,isrow,iscol,&isrow_d,&iscol_d,&iscol_o,&garray));

  PetscCall(ISDestroy(&isrow_d));
  PetscCall(ISDestroy(&iscol_d));
  PetscCall(ISDestroy(&iscol_o));
  PetscCall(PetscFree(garray));

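  /* Extract the submatrix twice: first creating it, then reusing it to exercise MAT_REUSE_MATRIX */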
  PetscCall(MatCreateSubMatrix(C,isrow,iscol,MAT_INITIAL_MATRIX,&A));
  PetscCall(MatCreateSubMatrix(C,isrow,iscol,MAT_REUSE_MATRIX,&A));

  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}