xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj.c (revision ffa8c5705e8ab2cf85ee1d14dbe507a6e2eb5283)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
5   This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
6   It computes the sensitivity of a component in the final solution, which locates in the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 #include "reaction_diffusion.h"
15 #include <petscdm.h>
16 #include <petscdmda.h>
17 
18 PetscErrorCode InitialConditions(DM,Vec);
19 
20 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
21 {
22    PetscInt i,j,Mx,My,xs,ys,xm,ym;
23    Field **l;
24    PetscFunctionBegin;
25 
26    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
27    /* locate the global i index for x and j index for y */
28    i = (PetscInt)(x*(Mx-1));
29    j = (PetscInt)(y*(My-1));
30    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
31 
32    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
33      /* the i,j vertex is on this process */
34      PetscCall(DMDAVecGetArray(da,lambda,&l));
35      l[j][i].u = 1.0;
36      l[j][i].v = 1.0;
37      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
38    }
39    PetscFunctionReturn(0);
40 }
41 
/*
   main - Solve the 2D reaction-diffusion system forward in time, then (unless
   -forwardonly is given) run the discrete adjoint to obtain the sensitivity of
   the solution component at the domain center w.r.t. the initial conditions.
*/
int main(int argc,char **argv)
{
  TS        ts;                 /* ODE integrator */
  Vec       x;                  /* solution */
  DM        da;                 /* 2D periodic grid manager */
  AppCtx    appctx;             /* model parameters shared with the callbacks */
  Vec       lambda[1];          /* adjoint variable: one cost gradient */
  PetscBool forwardonly = PETSC_FALSE,implicitform=PETSC_TRUE;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));

  /* physical parameters of the Gray-Scott reaction-diffusion model (see ex5.c) */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /* 64x64 periodic grid, 2 dof per vertex (u,v), star stencil of width 1 */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"u"));
  PetscCall(DMDASetFieldName(da,1,"v"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
  if (!implicitform) {
    /* explicit form: Runge-Kutta with user-supplied RHS function/Jacobian */
    PetscCall(TSSetType(ts,TSRK));
    PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
    PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));
  } else {
    /* implicit form (default): Crank-Nicolson with IFunction/IJacobian */
    PetscCall(TSSetType(ts,TSCN));
    PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
    if (appctx.aijpc) {
      /* Jacobian in SELL format, but preconditioner built from an AIJ copy */
      Mat                    A,B;

      PetscCall(DMSetMatType(da,MATSELL));
      PetscCall(DMCreateMatrix(da,&A));
      PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts,A,B,IJacobian,&appctx));
      /* TS holds its own references; drop ours */
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,x));
  PetscCall(TSSetSolution(ts,x));

  /*
    Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts,200.0));
  PetscCall(TSSetTimeStep(ts,0.5));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts,x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x,&lambda[0]));
    /*   Reset initial conditions for the adjoint integration: seed the cost
         gradient at the center of the domain */
    PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
    PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
150 
151 /* ------------------------------------------------------------------- */
152 PetscErrorCode InitialConditions(DM da,Vec U)
153 {
154   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
155   Field          **u;
156   PetscReal      hx,hy,x,y;
157 
158   PetscFunctionBegin;
159   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
160 
161   hx = 2.5/(PetscReal)Mx;
162   hy = 2.5/(PetscReal)My;
163 
164   /*
165      Get pointers to vector data
166   */
167   PetscCall(DMDAVecGetArray(da,U,&u));
168 
169   /*
170      Get local grid boundaries
171   */
172   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
173 
174   /*
175      Compute function over the locally owned part of the grid
176   */
177   for (j=ys; j<ys+ym; j++) {
178     y = j*hy;
179     for (i=xs; i<xs+xm; i++) {
180       x = i*hx;
181       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
182       else u[j][i].v = 0.0;
183 
184       u[j][i].u = 1.0 - 2.0*u[j][i].v;
185     }
186   }
187 
188   /*
189      Restore vectors
190   */
191   PetscCall(DMDAVecRestoreArray(da,U,&u));
192   PetscFunctionReturn(0);
193 }
194 
195 /*TEST
196 
197    build:
198       depends: reaction_diffusion.c
199       requires: !complex !single
200 
201    test:
202       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
203       output_file: output/ex5adj_1.out
204 
205    test:
206       suffix: 2
207       nsize: 2
208       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp
209 
210    test:
211       suffix: 3
212       nsize: 2
213       args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi
214 
215    test:
216       suffix: 4
217       nsize: 2
218       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
219       output_file: output/ex5adj_2.out
220 
221    test:
222       suffix: 5
223       nsize: 2
224       args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
225       output_file: output/ex5adj_1.out
226 
227    test:
228       suffix: knl
229       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
230       output_file: output/ex5adj_3.out
231       requires: knl
232 
233    test:
234       suffix: sell
235       nsize: 4
236       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
237       output_file: output/ex5adj_sell_1.out
238 
239    test:
240       suffix: aijsell
241       nsize: 4
242       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
243       output_file: output/ex5adj_sell_1.out
244 
245    test:
246       suffix: sell2
247       nsize: 4
248       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
249       output_file: output/ex5adj_sell_2.out
250 
251    test:
252       suffix: aijsell2
253       nsize: 4
254       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
255       output_file: output/ex5adj_sell_2.out
256 
257    test:
258       suffix: sell3
259       nsize: 4
260       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
261       output_file: output/ex5adj_sell_3.out
262 
263    test:
264       suffix: sell4
265       nsize: 4
266       args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
267       output_file: output/ex5adj_sell_4.out
268 
269    test:
270       suffix: sell5
271       nsize: 4
272       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
273       output_file: output/ex5adj_sell_5.out
274 
275    test:
276       suffix: aijsell5
277       nsize: 4
278       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
279       output_file: output/ex5adj_sell_5.out
280 
281    test:
282       suffix: sell6
283       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
284       output_file: output/ex5adj_sell_6.out
285 
286    test:
287       suffix: gamg1
288       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
289       output_file: output/ex5adj_gamg.out
290 
291    test:
292       suffix: gamg2
293       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
294       output_file: output/ex5adj_gamg.out
295 TEST*/
296