static char help[] = "Demonstrates automatic, matrix-free Jacobian generation using ADOL-C for a time-dependent PDE in 2d, solved using implicit timestepping.\n";

/*
   REQUIRES configuration of PETSc with option --download-adolc.

   For documentation on ADOL-C, see
     $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
*/
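/*
  Example usage (a sketch; the executable name is assumed from this source
  file and is not part of the original example):
    ./adr_ex5adj_mf              -> forward solve followed by an adjoint solve
    ./adr_ex5adj_mf -forwardonly -> forward solve only
*/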
/* ------------------------------------------------------------------------
  See ../advection-diffusion-reaction/ex5 for a description of the problem
  ------------------------------------------------------------------------- */
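/*
  For reference, the model from ex5 is the Gray-Scott reaction-diffusion
  system, solved here in fully implicit form (as can be read off the
  residual evaluations below):

     F_u = u_t - D1 (u_xx + u_yy) + u v^2 - gamma (1 - u)     = 0
     F_v = v_t - D2 (v_xx + v_yy) - u v^2 + (gamma + kappa) v = 0

  posed on the doubly periodic domain [0, 2.5] x [0, 2.5].
*/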

#include <petscdmda.h>
#include <petscts.h>
#include "adolc-utils/init.cxx"
#include "adolc-utils/matfree.cxx"
#include <adolc/adolc.h>

/* (Passive) field for the two variables */
typedef struct {
  PetscScalar u, v;
} Field;

/* Active field for the two variables */
typedef struct {
  adouble u, v;
} AField;

/* Application context */
typedef struct {
  PetscReal D1, D2, gamma, kappa;
  AField  **u_a, **f_a;
  AdolcCtx *adctx; /* Automatic differentiation support */
} AppCtx;

extern PetscErrorCode InitialConditions(DM da, Vec U);
extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
extern PetscErrorCode IJacobianMatFree(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A_shell, Mat B, void *ctx);

int main(int argc, char **argv) {
  TS          ts;   /* ODE integrator */
  Vec         x, r; /* solution, residual */
  DM          da;
  AppCtx      appctx; /* Application context */
  AdolcMatCtx matctx; /* Matrix (free) context */
  Vec         lambda[1];
  PetscBool   forwardonly = PETSC_FALSE;
  Mat         A; /* (Matrix free) Jacobian matrix */
  PetscInt    gxm, gym;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  PetscCall(PetscLogEventRegister("df/dx forward", MAT_CLASSID, &matctx.event1));
  PetscCall(PetscLogEventRegister("df/d(xdot) forward", MAT_CLASSID, &matctx.event2));
  PetscCall(PetscLogEventRegister("df/dx reverse", MAT_CLASSID, &matctx.event3));
  PetscCall(PetscLogEventRegister("df/d(xdot) reverse", MAT_CLASSID, &matctx.event4));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Create matrix free context and specify usage of PETSc-ADOL-C drivers
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatType(da, MATSHELL));
  PetscCall(DMCreateMatrix(da, &A));
  PetscCall(MatShellSetContext(A, &matctx));
  PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))PetscAdolcIJacobianVectorProductIDMass));
  PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void))PetscAdolcIJacobianTransposeVectorProductIDMass));
  PetscCall(VecDuplicate(x, &matctx.X));
  PetscCall(VecDuplicate(x, &matctx.Xdot));
  PetscCall(DMGetLocalVector(da, &matctx.localX0));
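  /*
    NOTE: The shell matrix is never assembled. Its action, and the transposed
          action required by TSAdjointSolve(), are applied matrix-free from
          the ADOL-C tape by the drivers in adolc-utils/matfree.cxx. Since
          dF/d(udot) is the identity for this residual (see the udot terms
          below), the "IDMass" variants can apply y = (a*I + dF/du) v, where
          a is the shift passed to IJacobianMatFree() below.
  */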

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSCN));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocal)IFunctionLocalPassive, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Some data required for matrix-free context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
  matctx.m    = 2 * gxm * gym; /* Number of dependent variables: 2 dof per ghosted grid point */
  matctx.n    = 2 * gxm * gym; /* Number of independent variables, likewise over the ghosted patch */
  matctx.flg  = PETSC_FALSE;   /* Flag for reverse mode */
  matctx.tag1 = 1;             /* Tape identifier */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Trace function just once
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscNew(&appctx.adctx));
  PetscCall(IFunctionActive(ts, 1., x, matctx.Xdot, r, &appctx));
  PetscCall(PetscFree(appctx.adctx));
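  /*
    NOTE: Tape 1, recorded by the call above, encodes a single evaluation of
          the residual. The matrix-free products replay it in forward or
          reverse mode on every Jacobian(-transpose)-vector product, so the
          function is never re-taped.
  */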

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set Jacobian. In this case, IJacobian simply acts to pass context
     information to the matrix-free Jacobian vector product.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetIJacobian(ts, A, A, IJacobianMatFree, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Have the TS save its trajectory so that TSAdjointSolve() may be used,
    and set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (!forwardonly) {
    PetscCall(TSSetSaveTrajectory(ts));
    PetscCall(TSSetMaxTime(ts, 200.0));
    PetscCall(TSSetTimeStep(ts, 0.5));
  } else {
    PetscCall(TSSetMaxTime(ts, 2000.0));
    PetscCall(TSSetTimeStep(ts, 10));
  }
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /*   Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMRestoreLocalVector(da, &matctx.localX0));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&matctx.X));
  PetscCall(VecDestroy(&matctx.Xdot));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode InitialConditions(DM da, Vec U) {
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;
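  /* (The mesh is periodic on [0, 2.5] x [0, 2.5], so the spacing is 2.5/Mx
     and 2.5/My rather than 2.5/(Mx - 1) and 2.5/(My - 1).) */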

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(0);
}

PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y) {
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* Map the fractional coordinates (x,y) in [0,1] to the global grid indices i and j */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(0);
}
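
/*
  NOTE: Seeding both components with 1 at a single grid point corresponds to
        a cost functional given by the sum u + v at that point at the final
        time; TSAdjointSolve() then propagates this gradient back to t = 0.
*/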

PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr) {
  AppCtx     *appctx = (AppCtx *)ptr;
  PetscInt    i, j, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy;

  PetscFunctionBegin;
  hx = 2.50 / (PetscReal)(info->mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(info->my);
  sy = 1.0 / (hy * hy);

  /* Get local grid boundaries */
  xs = info->xs;
  xm = info->xm;
  ys = info->ys;
  ym = info->ym;

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc        = u[j][i].u;
      uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
      uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
      vc        = u[j][i].v;
      vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
      vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
      f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }
  PetscCall(PetscLogFlops(16.0 * xm * ym));
  PetscFunctionReturn(0);
}
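
/*
  NOTE: The passive version above, registered with the TS via
        DMDATSSetIFunctionLocal(), performs all residual evaluations; the
        active version below is called just once, to record the tape.
*/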
PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr) {
  AppCtx       *appctx = (AppCtx *)ptr;
  DM            da;
  DMDALocalInfo info;
  Field       **u, **f, **udot;
  Vec           localU;
  PetscInt      i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal     hx, hy, sx, sy;
  adouble       uc, uxx, uyy, vc, vxx, vyy;
  AField      **f_a, *f_c, **u_a, *u_c;
  PetscScalar   dummy;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetLocalVector(da, &localU));
  hx  = 2.50 / (PetscReal)(info.mx);
  sx  = 1.0 / (hx * hx);
  hy  = 2.50 / (PetscReal)(info.my);
  sy  = 1.0 / (hy * hy);
  xs  = info.xs;
  xm  = info.xm;
  gxs = info.gxs;
  gxm = info.gxm;
  ys  = info.ys;
  ym  = info.ym;
  gys = info.gys;
  gym = info.gym;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm * info.gym];
  f_c = new AField[info.gxm * info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[info.gym];
  f_a = new AField *[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  trace_on(1); /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off(); /* End of active section */
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));

  PetscCall(DMRestoreLocalVector(da, &localU));

  /* Destroy AFields: first shift the row-pointer arrays back by the ghost
     offset applied in GiveGhostPoints(), so that delete[] receives the
     pointers originally returned by new */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(0);
}

/*
  Simply acts to pass TS information (the current time, the shift a and the
  vectors X and Xdot) to the AdolcMatCtx attached to the shell matrix, for
  use by the matrix-free Jacobian-vector products. No Jacobian is assembled.
*/
PetscErrorCode IJacobianMatFree(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A_shell, Mat B, void *ctx) {
  AdolcMatCtx *mctx;
  DM           da;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));

  mctx->time  = t;
  mctx->shift = a;
  if (mctx->ts != ts) mctx->ts = ts;
  PetscCall(VecCopy(X, mctx->X));
  PetscCall(VecCopy(Xdot, mctx->Xdot));
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGlobalToLocalBegin(da, mctx->X, INSERT_VALUES, mctx->localX0));
  PetscCall(DMGlobalToLocalEnd(da, mctx->X, INSERT_VALUES, mctx->localX0));
  PetscFunctionReturn(0);
}

/*TEST

  build:
    requires: double !complex adolc

  test:
    suffix: 1
    args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
    output_file: output/adr_ex5adj_mf_1.out

  test:
    suffix: 2
    nsize: 4
    args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
    output_file: output/adr_ex5adj_mf_2.out

TEST*/