xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component of the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 #include "reaction_diffusion.h"
15 #include <petscdm.h>
16 #include <petscdmda.h>
17 
18 PetscErrorCode InitialConditions(DM,Vec);
19 
20 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
21 {
22    PetscInt i,j,Mx,My,xs,ys,xm,ym;
23    Field **l;
24    PetscFunctionBegin;
25 
26    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
27    /* locate the global i index for x and j index for y */
28    i = (PetscInt)(x*(Mx-1));
29    j = (PetscInt)(y*(My-1));
30    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
31 
32    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
33      /* the i,j vertex is on this process */
34      PetscCall(DMDAVecGetArray(da,lambda,&l));
35      l[j][i].u = 1.0;
36      l[j][i].v = 1.0;
37      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
38    }
39    PetscFunctionReturn(0);
40 }
41 
int main(int argc,char **argv)
{
  TS        ts;                 /* ODE integrator */
  Vec       x;                  /* solution */
  DM        da;
  AppCtx    appctx;             /* PDE coefficients shared with the RHS/IFunction callbacks */
  Vec       lambda[1];          /* adjoint variable: sensitivity of the cost w.r.t. the initial conditions */
  PetscBool forwardonly = PETSC_FALSE,implicitform=PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));

  /* reaction-diffusion coefficients: diffusion rates D1/D2 and reaction parameters gamma/kappa */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da,0,"u"));
  PetscCall(DMDASetFieldName(da,1,"v"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
  PetscCall(TSSetDM(ts,da));
  PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
  if (!implicitform) {
    /* explicit path: Runge-Kutta with user-provided RHS function and Jacobian */
    PetscCall(TSSetType(ts,TSRK));
    PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
    PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));
  } else {
    /* implicit path: Crank-Nicolson with IFunction/IJacobian */
    PetscCall(TSSetType(ts,TSCN));
    PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
    if (appctx.aijpc) {
      Mat                    A,B;

      /* Jacobian in SELL format; convert a copy to AIJ to serve as the preconditioning matrix */
      PetscCall(DMSetMatType(da,MATSELL));
      PetscCall(DMCreateMatrix(da,&A));
      PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts,A,B,IJacobian,&appctx));
      /* TS keeps its own references to A and B, so the local handles can be released here */
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da,x));
  PetscCall(TSSetSolution(ts,x));

  /*
    Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts,200.0));
  PetscCall(TSSetTimeStep(ts,0.5));
  PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts,x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x,&lambda[0]));
    /*   Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
    /* one cost functional; gradients w.r.t. initial conditions only (no parameter gradients) */
    PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
151 
152 /* ------------------------------------------------------------------- */
153 PetscErrorCode InitialConditions(DM da,Vec U)
154 {
155   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
156   Field          **u;
157   PetscReal      hx,hy,x,y;
158 
159   PetscFunctionBegin;
160   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
161 
162   hx = 2.5/(PetscReal)Mx;
163   hy = 2.5/(PetscReal)My;
164 
165   /*
166      Get pointers to vector data
167   */
168   PetscCall(DMDAVecGetArray(da,U,&u));
169 
170   /*
171      Get local grid boundaries
172   */
173   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
174 
175   /*
176      Compute function over the locally owned part of the grid
177   */
178   for (j=ys; j<ys+ym; j++) {
179     y = j*hy;
180     for (i=xs; i<xs+xm; i++) {
181       x = i*hx;
182       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
183       else u[j][i].v = 0.0;
184 
185       u[j][i].u = 1.0 - 2.0*u[j][i].v;
186     }
187   }
188 
189   /*
190      Restore vectors
191   */
192   PetscCall(DMDAVecRestoreArray(da,U,&u));
193   PetscFunctionReturn(0);
194 }
195 
196 /*TEST
197 
198    build:
199       depends: reaction_diffusion.c
200       requires: !complex !single
201 
202    test:
203       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
204       output_file: output/ex5adj_1.out
205 
206    test:
207       suffix: 2
208       nsize: 2
209       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp
210 
211    test:
212       suffix: 3
213       nsize: 2
214       args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi
215 
216    test:
217       suffix: 4
218       nsize: 2
219       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
220       output_file: output/ex5adj_2.out
221 
222    test:
223       suffix: 5
224       nsize: 2
225       args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
226       output_file: output/ex5adj_1.out
227 
228    test:
229       suffix: knl
230       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
231       output_file: output/ex5adj_3.out
232       requires: knl
233 
234    test:
235       suffix: sell
236       nsize: 4
237       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
238       output_file: output/ex5adj_sell_1.out
239 
240    test:
241       suffix: aijsell
242       nsize: 4
243       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
244       output_file: output/ex5adj_sell_1.out
245 
246    test:
247       suffix: sell2
248       nsize: 4
249       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
250       output_file: output/ex5adj_sell_2.out
251 
252    test:
253       suffix: aijsell2
254       nsize: 4
255       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
256       output_file: output/ex5adj_sell_2.out
257 
258    test:
259       suffix: sell3
260       nsize: 4
261       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
262       output_file: output/ex5adj_sell_3.out
263 
264    test:
265       suffix: sell4
266       nsize: 4
267       args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
268       output_file: output/ex5adj_sell_4.out
269 
270    test:
271       suffix: sell5
272       nsize: 4
273       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
274       output_file: output/ex5adj_sell_5.out
275 
276    test:
277       suffix: aijsell5
278       nsize: 4
279       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
280       output_file: output/ex5adj_sell_5.out
281 
282    test:
283       suffix: sell6
284       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
285       output_file: output/ex5adj_sell_6.out
286 
287    test:
288       suffix: gamg1
289       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
290       output_file: output/ex5adj_gamg.out
291 
292    test:
293       suffix: gamg2
294       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
295       output_file: output/ex5adj_gamg.out
296 TEST*/
297