xref: /petsc/src/ksp/ksp/tests/ex26.c (revision f5ff9c666c0d37e8a7ec3aa2f8e2aa9e44449bdb)
1 static char help[] ="Solves Laplacian with multigrid. Tests block API for PCMG\n\
2   -mx <xg>, where <xg> = number of grid points in the x-direction\n\
3   -my <yg>, where <yg> = number of grid points in the y-direction\n\
4   -Nx <npx>, where <npx> = number of processors in the x-direction\n\
5   -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
6 
7 /*  Modified from ~src/ksp/tests/ex19.c. Used for testing ML 6.2 interface.
8 
9     This problem is modeled by
10     the partial differential equation
11 
12             -Laplacian u  = g,  0 < x,y < 1,
13 
14     with boundary conditions
15 
16              u = 0  for  x = 0, x = 1, y = 0, y = 1.
17 
18     A finite difference approximation with the usual 5-point stencil
19     is used to discretize the boundary value problem to obtain a linear
20     system of equations.
21 
22     Usage: ./ex26 -ksp_monitor_short -pc_type ml
23            -mg_coarse_ksp_max_it 10
24            -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10
25            -mg_fine_ksp_max_it 10
26 */
27 
28 #include <petscksp.h>
29 #include <petscdm.h>
30 #include <petscdmda.h>
31 
32 /* User-defined application contexts */
/* User-defined application contexts */
typedef struct {
  PetscInt mx,my;              /* number of grid points in the x and y directions */
  Vec      localX,localF;      /* local (ghosted) work vectors for the DMDA */
  DM       da;                 /* distributed array describing the structured grid */
  Vec      x,b,r;              /* global solution, right-hand side, and residual vectors
                                  (r is not referenced in this test; presumably kept from
                                  the template example ex19.c — TODO confirm) */
  Mat      J;                  /* Jacobian on this grid (unused here; see note above) */
  Mat      A,P,R;              /* operator, preconditioning, and restriction matrices
                                  (unused in this test) */
  KSP      ksp;                /* per-level solver (unused in this test) */
} GridCtx;
42 
43 static PetscErrorCode FormJacobian_Grid(GridCtx*,Mat);
44 
45 int main(int argc,char **argv)
46 {
47   PetscErrorCode ierr;
48   PetscInt       i,its,Nx=PETSC_DECIDE,Ny=PETSC_DECIDE,nlocal,nrhs = 1;
49   PetscScalar    one = 1.0;
50   Mat            A,B,X;
51   GridCtx        fine_ctx;
52   KSP            ksp;
53   PetscBool      Brand = PETSC_FALSE,flg;
54 
55   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
56   /* set up discretization matrix for fine grid */
57   fine_ctx.mx = 9;
58   fine_ctx.my = 9;
59   ierr = PetscOptionsGetInt(NULL,NULL,"-mx",&fine_ctx.mx,NULL);CHKERRQ(ierr);
60   ierr = PetscOptionsGetInt(NULL,NULL,"-my",&fine_ctx.my,NULL);CHKERRQ(ierr);
61   ierr = PetscOptionsGetInt(NULL,NULL,"-nrhs",&nrhs,NULL);CHKERRQ(ierr);
62   ierr = PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);CHKERRQ(ierr);
63   ierr = PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);CHKERRQ(ierr);
64   ierr = PetscOptionsGetBool(NULL,NULL,"-rand",&Brand,NULL);CHKERRQ(ierr);
65   ierr = PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);CHKERRQ(ierr);
66 
67   /* Set up distributed array for fine grid */
68   ierr = DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,fine_ctx.mx,fine_ctx.my,Nx,Ny,1,1,NULL,NULL,&fine_ctx.da);CHKERRQ(ierr);
69   ierr = DMSetFromOptions(fine_ctx.da);CHKERRQ(ierr);
70   ierr = DMSetUp(fine_ctx.da);CHKERRQ(ierr);
71   ierr = DMCreateGlobalVector(fine_ctx.da,&fine_ctx.x);CHKERRQ(ierr);
72   ierr = VecDuplicate(fine_ctx.x,&fine_ctx.b);CHKERRQ(ierr);
73   ierr = VecGetLocalSize(fine_ctx.x,&nlocal);CHKERRQ(ierr);
74   ierr = DMCreateLocalVector(fine_ctx.da,&fine_ctx.localX);CHKERRQ(ierr);
75   ierr = VecDuplicate(fine_ctx.localX,&fine_ctx.localF);CHKERRQ(ierr);
76   ierr = DMCreateMatrix(fine_ctx.da,&A);CHKERRQ(ierr);
77   ierr = FormJacobian_Grid(&fine_ctx,A);CHKERRQ(ierr);
78 
79   /* create linear solver */
80   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
81   ierr = KSPSetDM(ksp,fine_ctx.da);CHKERRQ(ierr);
82   ierr = KSPSetDMActive(ksp,PETSC_FALSE);CHKERRQ(ierr);
83 
84   /* set values for rhs vector */
85   ierr = VecSet(fine_ctx.b,one);CHKERRQ(ierr);
86 
87   /* set options, then solve system */
88   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
89   ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
90   ierr = KSPSolve(ksp,fine_ctx.b,fine_ctx.x);CHKERRQ(ierr);
91   ierr = VecViewFromOptions(fine_ctx.x,NULL,"-debug");CHKERRQ(ierr);
92   ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
93   ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
94   ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);CHKERRQ(ierr);
95 
96   /* test multiple right-hand side */
97   ierr = MatCreateDense(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,fine_ctx.mx*fine_ctx.my,nrhs,NULL,&B);CHKERRQ(ierr);
98   ierr = MatSetOptionsPrefix(B,"rhs_");CHKERRQ(ierr);
99   ierr = MatSetFromOptions(B);CHKERRQ(ierr);
100   ierr = MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);CHKERRQ(ierr);
101   if (Brand) {
102     ierr = MatSetRandom(B,NULL);CHKERRQ(ierr);
103   } else {
104     PetscScalar *b;
105 
106     ierr = MatDenseGetArrayWrite(B,&b);CHKERRQ(ierr);
107     for (i=0;i<nlocal*nrhs;i++) b[i] = 1.0;
108     ierr = MatDenseRestoreArrayWrite(B,&b);CHKERRQ(ierr);
109     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
110     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
111   }
112   ierr = KSPMatSolve(ksp,B,X);CHKERRQ(ierr);
113   ierr = MatViewFromOptions(X,NULL,"-debug");CHKERRQ(ierr);
114 
115   ierr = PetscObjectTypeCompare((PetscObject)ksp,KSPPREONLY,&flg);CHKERRQ(ierr);
116   if ((flg || nrhs == 1) && !Brand) {
117     PetscInt          n;
118     const PetscScalar *xx,*XX;
119 
120     ierr = VecGetArrayRead(fine_ctx.x,&xx);CHKERRQ(ierr);
121     ierr = MatDenseGetArrayRead(X,&XX);CHKERRQ(ierr);
122     for (n=0;n<nrhs;n++) {
123       for (i=0;i<nlocal;i++) {
124         if (PetscAbsScalar(xx[i] - XX[nlocal*n + i]) > PETSC_SMALL) {
125           ierr = PetscPrintf(PETSC_COMM_SELF,"[%d] Error local solve %D, entry %D -> %g + i %g != %g + i %g\n",PetscGlobalRank,n,i,(double)PetscRealPart(xx[i]),(double)PetscImaginaryPart(xx[i]),(double)PetscRealPart(XX[i]),(double)PetscImaginaryPart(XX[i]));CHKERRQ(ierr);
126         }
127       }
128     }
129     ierr = MatDenseRestoreArrayRead(X,&XX);CHKERRQ(ierr);
130     ierr = VecRestoreArrayRead(fine_ctx.x,&xx);CHKERRQ(ierr);
131   }
132 
133   /* free data structures */
134   ierr = VecDestroy(&fine_ctx.x);CHKERRQ(ierr);
135   ierr = VecDestroy(&fine_ctx.b);CHKERRQ(ierr);
136   ierr = DMDestroy(&fine_ctx.da);CHKERRQ(ierr);
137   ierr = VecDestroy(&fine_ctx.localX);CHKERRQ(ierr);
138   ierr = VecDestroy(&fine_ctx.localF);CHKERRQ(ierr);
139   ierr = MatDestroy(&A);CHKERRQ(ierr);
140   ierr = MatDestroy(&B);CHKERRQ(ierr);
141   ierr = MatDestroy(&X);CHKERRQ(ierr);
142   ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
143 
144   ierr = PetscFinalize();
145   return ierr;
146 }
147 
148 PetscErrorCode FormJacobian_Grid(GridCtx *grid,Mat jac)
149 {
150   PetscErrorCode         ierr;
151   PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
152   PetscInt               grow;
153   const PetscInt         *ltog;
154   PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
155   ISLocalToGlobalMapping ltogm;
156 
157   PetscFunctionBeginUser;
158   mx    = grid->mx;            my = grid->my;
159   hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
160   hxdhy = hx/hy;            hydhx = hy/hx;
161 
162   /* Get ghost points */
163   ierr = DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
164   ierr = DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);CHKERRQ(ierr);
165   ierr = DMGetLocalToGlobalMapping(grid->da,&ltogm);CHKERRQ(ierr);
166   ierr = ISLocalToGlobalMappingGetIndices(ltogm,&ltog);CHKERRQ(ierr);
167 
168   /* Evaluate Jacobian of function */
169   for (j=ys; j<ys+ym; j++) {
170     row = (j - Ys)*Xm + xs - Xs - 1;
171     for (i=xs; i<xs+xm; i++) {
172       row++;
173       grow = ltog[row];
174       if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
175         v[0] = -hxdhy; col[0] = ltog[row - Xm];
176         v[1] = -hydhx; col[1] = ltog[row - 1];
177         v[2] = two*(hydhx + hxdhy); col[2] = grow;
178         v[3] = -hydhx; col[3] = ltog[row + 1];
179         v[4] = -hxdhy; col[4] = ltog[row + Xm];
180         ierr = MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
181       } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
182         value = .5*two*(hydhx + hxdhy);
183         ierr  = MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);CHKERRQ(ierr);
184       } else {
185         value = .25*two*(hydhx + hxdhy);
186         ierr  = MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);CHKERRQ(ierr);
187       }
188     }
189   }
190   ierr = ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);CHKERRQ(ierr);
191   ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
192   ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
193   PetscFunctionReturn(0);
194 }
195 
196 /*TEST
197 
198     test:
199       args: -ksp_monitor_short
200 
201     test:
202       suffix: 2
203       args:  -ksp_monitor_short
204       nsize: 3
205 
206     test:
207       suffix: ml_1
208       args:  -ksp_monitor_short -pc_type ml -mat_no_inode
209       nsize: 3
210       requires: ml
211 
212     test:
213       suffix: ml_2
214       args:  -ksp_monitor_short -pc_type ml -mat_no_inode -ksp_max_it 3
215       nsize: 3
216       requires: ml
217 
218     test:
219       suffix: ml_3
220       args:  -ksp_monitor_short -pc_type ml -mat_no_inode -pc_mg_type ADDITIVE -ksp_max_it 7
221       nsize: 1
222       requires: ml
223 
224     test:
225       suffix: cycles
226       nsize: {{1 2}}
227       args: -ksp_view_final_residual -pc_type mg -mx 5 -my 5 -pc_mg_levels 3 -pc_mg_galerkin -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 1
228 
229     test:
230       suffix: matcycles
231       nsize: {{1 2}}
232       args: -ksp_view_final_residual -ksp_type preonly -pc_type mg -mx 5 -my 5 -pc_mg_levels 3 -pc_mg_galerkin -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}
233 
234     test:
235       requires: ml
236       suffix: matcycles_ml
237       nsize: {{1 2}}
238       args: -ksp_view_final_residual -ksp_type preonly -pc_type ml -mx 5 -my 5 -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}
239 
240     test:
241       requires: hpddm
242       suffix: matcycles_hpddm_mg
243       nsize: {{1 2}}
244       args: -ksp_view_final_residual -ksp_type hpddm -pc_type mg -pc_mg_levels 3 -pc_mg_galerkin -mx 5 -my 5 -ksp_monitor -mg_levels_ksp_type richardson -mg_levels_pc_type jacobi -pc_mg_type {{additive multiplicative full kaskade}separate output} -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}
245 
246     test:
247       requires: hpddm
248       nsize: {{1 2}}
249       suffix: matcycles_hpddm_ilu
250       args: -ksp_view_final_residual -ksp_type hpddm -pc_type redundant -redundant_pc_type ilu -mx 5 -my 5 -ksp_monitor -nrhs 7 -ksp_matsolve_batch_size {{4 7}separate output}
251 
252 TEST*/
253