static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
   See ex5.c for details on the equation.
   This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
   It computes the sensitivity of a component of the final solution, located at the center of the 2D domain, w.r.t. the initial conditions.
   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.

   Runtime options:
     -forwardonly  - run the forward simulation without the adjoint
     -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
     -aijpc        - set the matrix used to compute the preconditioner to be AIJ (the Jacobian matrix can be of a different type such as SELL)
*/
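
/*
   For reference (the model itself is defined in ex5.c), the PDE system is the
   Gray-Scott reaction-diffusion model

     u_t = D1*(u_xx + u_yy) - u*v^2 + gamma*(1 - u)
     v_t = D2*(v_xx + v_yy) + u*v^2 - (gamma + kappa)*v

   posed on the square [0, 2.5] x [0, 2.5] with periodic boundary conditions.
*/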
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

PetscErrorCode InitialConditions(DM, Vec);

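/*
   InitializeLambda seeds the terminal condition for the adjoint variable,
   lambda(T) = dPhi/dy(T). Setting both the u and v entries to 1.0 at the grid
   point nearest (x, y) corresponds to the scalar cost Phi = u + v evaluated at
   that point; after TSAdjointSolve() the same vector contains dPhi/dy(0), the
   sensitivity of Phi with respect to the initial conditions.
*/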
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS        ts; /* ODE integrator */
  Vec       x;  /* solution */
  DM        da;
  AppCtx    appctx;
  Vec       lambda[1];
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
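  /* Note: these parameter values appear to lie in a pattern-forming regime of
     the Gray-Scott model; see ex5.c for context on the choice. */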

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when adjoint BEuler is used; otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetType(ts, TSRK));
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetType(ts, TSCN));
    PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
    if (appctx.aijpc) {
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
      /* TSSetIJacobian() keeps its own references to A and B, so the local handles can be destroyed here */
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
    }
  }
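
  /*
     Note: instead of the analytic Jacobian routines set above, the Jacobian can
     be approximated at runtime by finite differences with coloring
     (-snes_fd_color), as several of the tests below do.
  */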

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
     Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));
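  /*
     By default the trajectory checkpoints are written to disk; runtime options
     such as -ts_trajectory_type memory (keep checkpoints in RAM) or
     -ts_trajectory_dirname and -ts_trajectory_file_template (control the
     on-disk layout) adjust this, as exercised by the tests below.
  */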

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 200.0));
  PetscCall(TSSetTimeStep(ts, 0.5));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /* Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(TSAdjointSolve(ts));
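    /*
       lambda[0] now holds dPhi/dy(0), the gradient of the cost with respect to
       the initial conditions. A minimal way to inspect it before it is
       destroyed (not part of the original example) would be:

         PetscCall(VecView(lambda[0], PETSC_VIEWER_STDOUT_WORLD));
    */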
    PetscCall(VecDestroy(&lambda[0]));
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid:
     a smooth bump in v on the subsquare [1, 1.5] x [1, 1.5] and u = 1 - 2v
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
      output_file: output/ex5adj_1.out

   test:
      suffix: 2
      nsize: 2
      args: -ts_max_steps 10 -ts_time_step 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp

   test:
      suffix: 3
      nsize: 2
      args: -ts_max_steps 10 -ts_time_step 10 -ts_adjoint_monitor_draw_sensi
      output_file: output/empty.out

   test:
      suffix: 4
      nsize: 2
      args: -ts_max_steps 10 -ts_time_step 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
      output_file: output/ex5adj_2.out

   test:
      suffix: 5
      nsize: 2
      args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
      output_file: output/ex5adj_1.out

   test:
      suffix: knl
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
      output_file: output/empty.out
      requires: knl

   test:
      suffix: sell
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
      output_file: output/ex5adj_sell_1.out

   test:
      suffix: aijsell
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
      output_file: output/ex5adj_sell_1.out

   test:
      suffix: sell2
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
      output_file: output/ex5adj_sell_2.out

   test:
      suffix: aijsell2
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
      output_file: output/ex5adj_sell_2.out

   test:
      suffix: sell3
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
      output_file: output/ex5adj_sell_3.out

   test:
      suffix: sell4
      nsize: 4
      args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
      output_file: output/ex5adj_sell_4.out

   test:
      suffix: sell5
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
      output_file: output/ex5adj_sell_5.out

   test:
      suffix: aijsell5
      nsize: 4
      args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
      output_file: output/ex5adj_sell_5.out

   test:
      suffix: sell6
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
      output_file: output/ex5adj_sell_6.out

   test:
      suffix: sell7
      args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sellcuda -dm_vec_type cuda -pc_type jacobi
      output_file: output/ex5adj_sell_6.out
      requires: cuda

   test:
      suffix: gamg1
      args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
      output_file: output/ex5adj_gamg.out

   test:
      suffix: gamg2
      args: -pc_type gamg -pc_gamg_esteig_ksp_type gmres -pc_gamg_esteig_ksp_max_it 10 -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose -ksp_rtol 1e-2 -pc_gamg_use_sa_esteig 0
      output_file: output/ex5adj_gamg.out

TEST*/