static char help[] = "Demonstrates automatic, matrix-free Jacobian generation using ADOL-C for a time-dependent PDE in 2d, solved using implicit timestepping.\n";

/*
   REQUIRES configuration of PETSc with option --download-adolc.

   For documentation on ADOL-C, see
     $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
*/
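/*
  Example usage (assuming the executable is built under the usual tutorial
  name adr_ex5adj_mf, as in the test harness below):

    ./adr_ex5adj_mf -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
    ./adr_ex5adj_mf -forwardonly   (run the forward model only, skipping the adjoint solve)
*/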
/* ------------------------------------------------------------------------
  See ../advection-diffusion-reaction/ex5 for a description of the problem
  ------------------------------------------------------------------------ */

#include <petscdmda.h>
#include <petscts.h>
#include "adolc-utils/init.cxx"
#include "adolc-utils/matfree.cxx"
#include <adolc/adolc.h>

/* (Passive) field for the two variables */
typedef struct {
  PetscScalar u, v;
} Field;

/* Active field for the two variables */
typedef struct {
  adouble u, v;
} AField;

/* Application context */
typedef struct {
  PetscReal D1, D2, gamma, kappa;
  AField  **u_a, **f_a;
  AdolcCtx *adctx; /* Automatic differentiation support */
} AppCtx;

extern PetscErrorCode InitialConditions(DM da, Vec U);
extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
extern PetscErrorCode IJacobianMatFree(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A_shell, Mat B, void *ctx);

int main(int argc, char **argv)
{
  TS          ts;   /* ODE integrator */
  Vec         x, r; /* solution, residual */
  DM          da;
  AppCtx      appctx; /* Application context */
  AdolcMatCtx matctx; /* Matrix-free context */
  Vec         lambda[1];
  PetscBool   forwardonly = PETSC_FALSE;
  Mat         A; /* Matrix-free Jacobian */
  PetscInt    gxm, gym;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
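  /* Register log events so that the ADOL-C forward and reverse sweeps used in
     the matrix-free Jacobian (transpose) vector products can be profiled
     separately */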
  PetscCall(PetscLogEventRegister("df/dx forward", MAT_CLASSID, &matctx.event1));
  PetscCall(PetscLogEventRegister("df/d(xdot) forward", MAT_CLASSID, &matctx.event2));
  PetscCall(PetscLogEventRegister("df/dx reverse", MAT_CLASSID, &matctx.event3));
  PetscCall(PetscLogEventRegister("df/d(xdot) reverse", MAT_CLASSID, &matctx.event4));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Create matrix-free context and specify usage of PETSc-ADOL-C drivers
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatType(da, MATSHELL));
  PetscCall(DMCreateMatrix(da, &A));
  PetscCall(MatShellSetContext(A, &matctx));
  PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))PetscAdolcIJacobianVectorProductIDMass));
  PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void))PetscAdolcIJacobianTransposeVectorProductIDMass));
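  /* The MATOP_MULT operation set above is applied in the linearised solves of
     the forward run, while MATOP_MULT_TRANSPOSE provides the transposed
     products required by TSAdjointSolve() in the reverse run */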
  PetscCall(VecDuplicate(x, &matctx.X));
  PetscCall(VecDuplicate(x, &matctx.Xdot));
  PetscCall(DMGetLocalVector(da, &matctx.localX0));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSCN));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocal)IFunctionLocalPassive, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Some data required for matrix-free context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
  matctx.m    = 2 * gxm * gym;
  matctx.n    = 2 * gxm * gym; /* Number of dependent and independent variables */
  matctx.flg  = PETSC_FALSE;   /* Flag for reverse mode */
  matctx.tag1 = 1;             /* Tape identifier */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Trace function just once
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscNew(&appctx.adctx));
  PetscCall(IFunctionActive(ts, 1., x, matctx.Xdot, r, &appctx));
  PetscCall(PetscFree(appctx.adctx));
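  /* The tape (tag 1) written by the call above is re-used by every subsequent
     Jacobian-vector product; only the values of the independent variables
     change between products */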

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set Jacobian. In this case, IJacobian simply acts to pass context
     information to the matrix-free Jacobian-vector product.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetIJacobian(ts, A, A, IJacobianMatFree, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Have the TS save its trajectory so that TSAdjointSolve() may be used,
     and set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (!forwardonly) {
    PetscCall(TSSetSaveTrajectory(ts));
    PetscCall(TSSetMaxTime(ts, 200.0));
    PetscCall(TSSetTimeStep(ts, 0.5));
  } else {
    PetscCall(TSSetMaxTime(ts, 2000.0));
    PetscCall(TSSetTimeStep(ts, 10));
  }
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /* Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
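    /* lambda[0] holds the gradient of the cost functional with respect to the
       final state: here a unit impulse at a single grid point, which
       TSAdjointSolve() propagates back to the initial time */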
    PetscCall(TSAdjointSolve(ts));
    PetscCall(VecDestroy(&lambda[0]));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMRestoreLocalVector(da, &matctx.localX0));
  PetscCall(VecDestroy(&r));
  PetscCall(VecDestroy(&matctx.X));
  PetscCall(VecDestroy(&matctx.Xdot));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;
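  /* The domain is the periodic box [0, 2.5] x [0, 2.5] */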

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr)
{
  AppCtx     *appctx = (AppCtx *)ptr;
  PetscInt    i, j, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy;

  PetscFunctionBegin;
  hx = 2.50 / (PetscReal)(info->mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(info->my);
  sy = 1.0 / (hy * hy);

  /* Get local grid boundaries */
  xs = info->xs;
  xm = info->xm;
  ys = info->ys;
  ym = info->ym;

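  /* The implicit residual has the form F(t,u,udot) = udot - RHS(u): centered
     5-point-stencil Laplacians for the diffusion terms plus the nonlinear
     u*v*v reaction terms (see ex5 for the model) */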
  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc        = u[j][i].u;
      uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
      uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
      vc        = u[j][i].v;
      vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
      vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
      f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }
  PetscCall(PetscLogFlops(16.0 * xm * ym));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
{
  AppCtx       *appctx = (AppCtx *)ptr;
  DM            da;
  DMDALocalInfo info;
  Field       **u, **f, **udot;
  Vec           localU;
  PetscInt      i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal     hx, hy, sx, sy;
  adouble       uc, uxx, uyy, vc, vxx, vyy;
  AField      **f_a, *f_c, **u_a, *u_c;
  PetscScalar   dummy;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetLocalVector(da, &localU));
  hx  = 2.50 / (PetscReal)(info.mx);
  sx  = 1.0 / (hx * hx);
  hy  = 2.50 / (PetscReal)(info.my);
  sy  = 1.0 / (hy * hy);
  xs  = info.xs;
  xm  = info.xm;
  gxs = info.gxs;
  gxm = info.gxm;
  ys  = info.ys;
  ym  = info.ym;
  gys = info.gys;
  gym = info.gym;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));

  /*
    Create contiguous 1D arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm * info.gym];
  f_c = new AField[info.gxm * info.gym];

  /* Create corresponding 2D arrays of AFields */
  u_a = new AField *[info.gym];
  f_a = new AField *[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));
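  /* From here on u_a and f_a can be addressed with the same global (ghosted)
     indices as the PETSc arrays u and f */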

  trace_on(1); /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
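  /* (ADOL-C's <<= operator declares an independent variable and assigns it
     its passive value) */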
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later.
  */
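  /* (ADOL-C's >>= operator declares a dependent variable and extracts its
     passive value) */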
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off(); /* End of active section */
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));

  PetscCall(DMRestoreLocalVector(da, &localU));

  /* Destroy AFields appropriately */
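  /* (GiveGhostPoints offset the row pointers by the ghost corner gys, so
     shift them back to recover the addresses originally returned by `new`
     before calling delete[]) */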
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Simply acts to pass TS information to the AdolcMatCtx; the Jacobian-vector
  products themselves are applied by the shell matrix operations set in main()
*/
PetscErrorCode IJacobianMatFree(TS ts, PetscReal t, Vec X, Vec Xdot, PetscReal a, Mat A_shell, Mat B, void *ctx)
{
  AdolcMatCtx *mctx;
  DM           da;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));

  mctx->time  = t;
  mctx->shift = a;
  if (mctx->ts != ts) mctx->ts = ts;
  PetscCall(VecCopy(X, mctx->X));
  PetscCall(VecCopy(Xdot, mctx->Xdot));
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGlobalToLocalBegin(da, mctx->X, INSERT_VALUES, mctx->localX0));
  PetscCall(DMGlobalToLocalEnd(da, mctx->X, INSERT_VALUES, mctx->localX0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

  build:
    requires: double !complex adolc

  test:
    suffix: 1
    args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
    output_file: output/adr_ex5adj_mf_1.out

  test:
    suffix: 2
    nsize: 4
    args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
    output_file: output/adr_ex5adj_mf_2.out

TEST*/