xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component in the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 #include "reaction_diffusion.h"
15 #include <petscdm.h>
16 #include <petscdmda.h>
17 
18 PetscErrorCode InitialConditions(DM, Vec);
19 
20 PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y) {
21   PetscInt i, j, Mx, My, xs, ys, xm, ym;
22   Field  **l;
23   PetscFunctionBegin;
24 
25   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
26   /* locate the global i index for x and j index for y */
27   i = (PetscInt)(x * (Mx - 1));
28   j = (PetscInt)(y * (My - 1));
29   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
30 
31   if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
32     /* the i,j vertex is on this process */
33     PetscCall(DMDAVecGetArray(da, lambda, &l));
34     l[j][i].u = 1.0;
35     l[j][i].v = 1.0;
36     PetscCall(DMDAVecRestoreArray(da, lambda, &l));
37   }
38   PetscFunctionReturn(0);
39 }
40 
int main(int argc, char **argv) {
  TS        ts; /* ODE integrator */
  Vec       x;  /* solution */
  DM        da;
  AppCtx    appctx;    /* problem coefficients shared with the RHS/IFunction/IJacobian callbacks */
  Vec       lambda[1]; /* adjoint variable: d(cost)/d(initial conditions); one cost functional => one vector */
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));

  /* diffusion (D1, D2) and reaction (gamma, kappa) coefficients; see ex5.c for the model */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /* 64x64 periodic grid, 2 dof per vertex (u and v), star stencil of width 1 */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
  if (!implicitform) {
    /* explicit path: Runge-Kutta with user-supplied RHS function/Jacobian */
    PetscCall(TSSetType(ts, TSRK));
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    /* implicit path: Crank-Nicolson with IFunction/IJacobian */
    PetscCall(TSSetType(ts, TSCN));
    PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
    if (appctx.aijpc) {
      /* Jacobian stored as SELL, preconditioner matrix as AIJ (demonstrates mixed matrix types) */
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
      /* TS holds its own references to A and B, so the local ones can be released here */
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
    Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 200.0));
  PetscCall(TSSetTimeStep(ts, 0.5));
  /* MATCHSTEP adjusts the final step to land exactly on the max time */
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /*   Reset initial conditions for the adjoint integration */
    /* seed the adjoint at the domain-center component; see InitializeLambda */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    /* integrates backward in time over the saved trajectory; lambda[0] ends up holding the sensitivities */
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
149 
150 /* ------------------------------------------------------------------- */
151 PetscErrorCode InitialConditions(DM da, Vec U) {
152   PetscInt  i, j, xs, ys, xm, ym, Mx, My;
153   Field   **u;
154   PetscReal hx, hy, x, y;
155 
156   PetscFunctionBegin;
157   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
158 
159   hx = 2.5 / (PetscReal)Mx;
160   hy = 2.5 / (PetscReal)My;
161 
162   /*
163      Get pointers to vector data
164   */
165   PetscCall(DMDAVecGetArray(da, U, &u));
166 
167   /*
168      Get local grid boundaries
169   */
170   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
171 
172   /*
173      Compute function over the locally owned part of the grid
174   */
175   for (j = ys; j < ys + ym; j++) {
176     y = j * hy;
177     for (i = xs; i < xs + xm; i++) {
178       x = i * hx;
179       if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
180         u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
181       else u[j][i].v = 0.0;
182 
183       u[j][i].u = 1.0 - 2.0 * u[j][i].v;
184     }
185   }
186 
187   /*
188      Restore vectors
189   */
190   PetscCall(DMDAVecRestoreArray(da, U, &u));
191   PetscFunctionReturn(0);
192 }
193 
194 /*TEST
195 
196    build:
197       depends: reaction_diffusion.c
198       requires: !complex !single
199 
200    test:
201       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
202       output_file: output/ex5adj_1.out
203 
204    test:
205       suffix: 2
206       nsize: 2
207       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp
208 
209    test:
210       suffix: 3
211       nsize: 2
212       args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi
213 
214    test:
215       suffix: 4
216       nsize: 2
217       args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
218       output_file: output/ex5adj_2.out
219 
220    test:
221       suffix: 5
222       nsize: 2
223       args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
224       output_file: output/ex5adj_1.out
225 
226    test:
227       suffix: knl
228       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
229       output_file: output/ex5adj_3.out
230       requires: knl
231 
232    test:
233       suffix: sell
234       nsize: 4
235       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
236       output_file: output/ex5adj_sell_1.out
237 
238    test:
239       suffix: aijsell
240       nsize: 4
241       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
242       output_file: output/ex5adj_sell_1.out
243 
244    test:
245       suffix: sell2
246       nsize: 4
247       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
248       output_file: output/ex5adj_sell_2.out
249 
250    test:
251       suffix: aijsell2
252       nsize: 4
253       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
254       output_file: output/ex5adj_sell_2.out
255 
256    test:
257       suffix: sell3
258       nsize: 4
259       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
260       output_file: output/ex5adj_sell_3.out
261 
262    test:
263       suffix: sell4
264       nsize: 4
265       args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
266       output_file: output/ex5adj_sell_4.out
267 
268    test:
269       suffix: sell5
270       nsize: 4
271       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
272       output_file: output/ex5adj_sell_5.out
273 
274    test:
275       suffix: aijsell5
276       nsize: 4
277       args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
278       output_file: output/ex5adj_sell_5.out
279 
280    test:
281       suffix: sell6
282       args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
283       output_file: output/ex5adj_sell_6.out
284 
285    test:
286       suffix: gamg1
287       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
288       output_file: output/ex5adj_gamg.out
289 
290    test:
291       suffix: gamg2
292       args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -pc_mg_levels 2 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
293       output_file: output/ex5adj_gamg.out
294 TEST*/
295