static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
   See ex5.c for details on the equation.
   This code demonstrates the TSAdjoint/TAO interface to solve an inverse initial value problem built on a system of
   time-dependent partial differential equations.
   In this problem, the initial value for the PDE is unknown, but the output (the final solution of the PDE) is known.
   We want to determine the initial value that can produce the given output.
   We formulate the problem as a nonlinear optimization problem that minimizes the discrepancy between the simulated
   result and given reference solution, calculate the gradient of the objective function with the discrete adjoint
   solver, and solve the optimization problem with TAO.

   Runtime options:
     -forwardonly  - run only the forward simulation
     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
 */

#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
#include <petsctao.h>

/* User-defined routines */
extern PetscErrorCode FormFunctionAndGradient(Tao, Vec, PetscReal *, Vec, void *);

/*
   Set terminal condition for the adjoint variable.

   Accumulates lambda += 2*(U - Uob), which is the gradient of the objective
   ||U - Uob||^2 with respect to the final state U. The reference solution Uob
   is read back from the binary file "ex5opt.ob" written by GenerateOBs().

   Input Parameters:
   da     - the DMDA managing the grid (unused here, kept for a uniform callback signature)
   U      - the final forward solution
   appctx - application context (unused here)

   Output Parameter:
   lambda - the adjoint terminal condition, accumulated in place
*/
PetscErrorCode InitializeLambda(DM da, Vec lambda, Vec U, AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  Vec         Uob;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(U, &Uob));
  PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, filename, FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(Uob, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  /* Uob <- U - Uob, then scale by 2 and accumulate into lambda */
  PetscCall(VecAYPX(Uob, -1., U));
  PetscCall(VecScale(Uob, 2.0));
  PetscCall(VecAXPY(lambda, 1., Uob));
  PetscCall(VecDestroy(&Uob));
  PetscFunctionReturn(0);
}

/*
   Set up a viewer that dumps data into a binary file
*/
PetscErrorCode OutputBIN(DM da, const char *filename, PetscViewer *viewer)
{
  PetscFunctionBegin;
  /* Create the viewer on the DM's communicator so all ranks participate in the write */
  PetscCall(PetscViewerCreate(PetscObjectComm((PetscObject)da), viewer));
  PetscCall(PetscViewerSetType(*viewer, PETSCVIEWERBINARY));
  PetscCall(PetscViewerFileSetMode(*viewer, FILE_MODE_WRITE));
  PetscCall(PetscViewerFileSetName(*viewer, filename));
  PetscFunctionReturn(0);
}

/*
   Generate a reference solution and save it to a binary file.

   Runs the forward solve to the final time (U then holds the reference
   "observed" solution) and dumps it to "ex5opt.ob" for later use by
   FormFunctionAndGradient() and InitializeLambda().
*/
PetscErrorCode GenerateOBs(TS ts, Vec U, AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  DM          da;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(TSSolve(ts, U));
  PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob"));
  PetscCall(OutputBIN(da, filename, &viewer));
  PetscCall(VecView(U, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscFunctionReturn(0);
}

/*
   Reference initial conditions: v is a sin^2 patch supported on [1,1.5]x[1,1.5],
   zero elsewhere, and u = 1 - 2v everywhere (see ex5.c for the model).
*/
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  /* Grid spacing for the [0,2.5]x[0,2.5] periodic domain */
  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

/*
   Perturbed initial conditions #1: same patch as InitialConditions() but with the
   sin frequency halved (extra 0.5 factor inside the sin arguments). Used as the
   initial guess for the optimization.
*/
PetscErrorCode PerturbedInitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * 0.5 * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * 0.5 * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

/*
   Perturbed initial conditions #2: patch shifted by 0.5 in x, and the patch
   boundary test uses approximate (tolerance-based) comparisons.
*/
PetscErrorCode PerturbedInitialConditions2(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * (x - 0.5)), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

/*
   Perturbed initial conditions #3: same profile as InitialConditions() but on a
   wider patch [0.5,2.0]x[0.5,2.0].
*/
PetscErrorCode PerturbedInitialConditions3(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  PetscCall(DMDAVecGetArray(da, U, &u));
  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((0.5 <= x) && (x <= 2.0) && (0.5 <= y) && (y <= 2.0)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM        da;
  AppCtx    appctx;
  PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE;
  PetscInt  perturbic = 1; /* which perturbed IC to use as the optimization starting guess (1, 2, or 3) */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-perturbic", &perturbic, NULL));

  /* Physical parameters of the reaction-diffusion system (see ex5.c) */
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* Create distributed array (DMDA) to manage parallel grid and vectors */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types */
  PetscCall(DMCreateGlobalVector(da, &appctx.U));

  /* Create timestepping solver context */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetType(appctx.ts, TSCN));
  PetscCall(TSSetDM(appctx.ts, da));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(appctx.ts, TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(appctx.ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetIFunction(appctx.ts, NULL, IFunction, &appctx));
    PetscCall(TSSetIJacobian(appctx.ts, NULL, NULL, IJacobian, &appctx));
  }

  /* Set initial conditions */
  PetscCall(InitialConditions(da, appctx.U));
  PetscCall(TSSetSolution(appctx.ts, appctx.U));

  /* Set solver options */
  PetscCall(TSSetMaxTime(appctx.ts, 2000.0));
  PetscCall(TSSetTimeStep(appctx.ts, 0.5));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(appctx.ts));

  /* Run the forward model once with the true IC and store the result as the reference */
  PetscCall(GenerateOBs(appctx.ts, appctx.U, &appctx));

  if (!forwardonly) {
    Tao tao;
    Vec P;
    Vec lambda[1];
#if defined(PETSC_USE_LOG)
    PetscLogStage opt_stage;
#endif

    /* NOTE(review): opt_stage is declared only when PETSC_USE_LOG is defined, yet it is
       used unconditionally below — confirm the logging macros compile out cleanly when
       logging is disabled. */
    PetscCall(PetscLogStageRegister("Optimization", &opt_stage));
    PetscCall(PetscLogStagePush(opt_stage));
    /* Replace the true IC with a perturbed one; the optimizer must recover the original */
    if (perturbic == 1) {
      PetscCall(PerturbedInitialConditions(da, appctx.U));
    } else if (perturbic == 2) {
      PetscCall(PerturbedInitialConditions2(da, appctx.U));
    } else if (perturbic == 3) {
      PetscCall(PerturbedInitialConditions3(da, appctx.U));
    }

    PetscCall(VecDuplicate(appctx.U, &lambda[0]));
    PetscCall(TSSetCostGradients(appctx.ts, 1, lambda, NULL));

    /* Have the TS save its trajectory needed by TSAdjointSolve() */
    PetscCall(TSSetSaveTrajectory(appctx.ts));

    /* Create TAO solver and set desired solution method */
    PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
    PetscCall(TaoSetType(tao, TAOBLMVM));

    /* Set initial guess for TAO */
    PetscCall(VecDuplicate(appctx.U, &P));
    PetscCall(VecCopy(appctx.U, P));
    PetscCall(TaoSetSolution(tao, P));

    /* Set routine for function and gradient evaluation */
    PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionAndGradient, &appctx));

    /* Check for any TAO command line options */
    PetscCall(TaoSetFromOptions(tao));

    PetscCall(TaoSolve(tao));
    PetscCall(TaoDestroy(&tao));
    PetscCall(VecDestroy(&lambda[0]));
    PetscCall(VecDestroy(&P));
    PetscCall(PetscLogStagePop());
  }

  /* Free work space. All PETSc objects should be destroyed when they
     are no longer needed. */
  PetscCall(VecDestroy(&appctx.U));
  PetscCall(TSDestroy(&appctx.ts));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------ TAO callbacks ---------------------------- */

/*
   FormFunctionAndGradient - Evaluates the function and corresponding gradient.
324 325 Input Parameters: 326 tao - the Tao context 327 P - the input vector 328 ctx - optional user-defined context, as set by TaoSetObjectiveAndGradient() 329 330 Output Parameters: 331 f - the newly evaluated function 332 G - the newly evaluated gradient 333 */ 334 PetscErrorCode FormFunctionAndGradient(Tao tao, Vec P, PetscReal *f, Vec G, void *ctx) 335 { 336 AppCtx *appctx = (AppCtx *)ctx; 337 PetscReal soberr, timestep; 338 Vec *lambda; 339 Vec SDiff; 340 DM da; 341 char filename[PETSC_MAX_PATH_LEN] = ""; 342 PetscViewer viewer; 343 344 PetscFunctionBeginUser; 345 PetscCall(TSSetTime(appctx->ts, 0.0)); 346 PetscCall(TSGetTimeStep(appctx->ts, ×tep)); 347 if (timestep < 0) PetscCall(TSSetTimeStep(appctx->ts, -timestep)); 348 PetscCall(TSSetStepNumber(appctx->ts, 0)); 349 PetscCall(TSSetFromOptions(appctx->ts)); 350 351 PetscCall(VecDuplicate(P, &SDiff)); 352 PetscCall(VecCopy(P, appctx->U)); 353 PetscCall(TSGetDM(appctx->ts, &da)); 354 *f = 0; 355 356 PetscCall(TSSolve(appctx->ts, appctx->U)); 357 PetscCall(PetscSNPrintf(filename, sizeof filename, "ex5opt.ob")); 358 PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, filename, FILE_MODE_READ, &viewer)); 359 PetscCall(VecLoad(SDiff, viewer)); 360 PetscCall(PetscViewerDestroy(&viewer)); 361 PetscCall(VecAYPX(SDiff, -1., appctx->U)); 362 PetscCall(VecDot(SDiff, SDiff, &soberr)); 363 *f += soberr; 364 365 PetscCall(TSGetCostGradients(appctx->ts, NULL, &lambda, NULL)); 366 PetscCall(VecSet(lambda[0], 0.0)); 367 PetscCall(InitializeLambda(da, lambda[0], appctx->U, appctx)); 368 PetscCall(TSAdjointSolve(appctx->ts)); 369 370 PetscCall(VecCopy(lambda[0], G)); 371 372 PetscCall(VecDestroy(&SDiff)); 373 PetscFunctionReturn(0); 374 } 375 376 /*TEST 377 378 build: 379 depends: reaction_diffusion.c 380 requires: !complex !single 381 382 test: 383 args: -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6 384 output_file: output/ex5opt_ic_1.out 385 386 TEST*/ 387