xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj.cxx (revision 21e3ffae2f3b73c0bd738cf6d0a809700fc04bb0)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4    REQUIRES configuration of PETSc with option --download-adolc.
5 
6    For documentation on ADOL-C, see
7      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
8 
9    REQUIRES configuration of PETSc with option --download-colpack
10 
11    For documentation on ColPack, see
12      $PETSC_ARCH/externalpackages/git.colpack/README.md
13 */
14 /* ------------------------------------------------------------------------
15   See ../advection-diffusion-reaction/ex5 for a description of the problem
16   ------------------------------------------------------------------------- */
17 
18 /*
19   Runtime options:
20 
21     Solver:
22       -forwardonly       - Run the forward simulation without adjoint.
23       -implicitform      - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used.
24       -aijpc             - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
25 
26     Jacobian generation:
27       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
28       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
29       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
30       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
31  */
32 /*
33   NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are
34         identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples
35         of 5, in order for the 5-point stencil to be cleanly parallelised.
36 */
37 
38 #include <petscdmda.h>
39 #include <petscts.h>
40 #include "adolc-utils/drivers.cxx"
41 #include <adolc/adolc.h>
42 
/* (Passive) field for the two variables */
typedef struct {
  PetscScalar u, v;
} Field;

/* Active field for the two variables: same layout as Field but with ADOL-C
   adouble entries, so operations on it are recorded on the tape */
typedef struct {
  adouble u, v;
} AField;

/* Application context */
typedef struct {
  PetscReal D1, D2, gamma, kappa; /* diffusion coefficients (D1, D2) and reaction constants (gamma, kappa) */
  AField  **u_a, **f_a;           /* active work arrays for state and residual */
  PetscBool aijpc;                /* use an AIJ preconditioner matrix (-aijpc)? */
  AdolcCtx *adctx;                /* Automatic differentiation support */
} AppCtx;

/* Initial data and adjoint seeding */
extern PetscErrorCode InitialConditions(DM da, Vec U);
extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
/* Residual evaluations: passive (plain scalar) and active (ADOL-C annotated) variants */
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
extern PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
extern PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
/* Jacobian evaluations: hand-coded and ADOL-C generated variants */
extern PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
extern PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
extern PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);
extern PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);
71 
72 int main(int argc, char **argv)
73 {
74   TS             ts;
75   Vec            x, r, xdot;
76   DM             da;
77   AppCtx         appctx;
78   AdolcCtx      *adctx;
79   Vec            lambda[1];
80   PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE, byhand = PETSC_FALSE;
81   PetscInt       gxm, gym, i, dofs = 2, ctrl[3] = {0, 0, 0};
82   PetscScalar  **Seed = NULL, **Rec = NULL, *u_vec;
83   unsigned int **JP = NULL;
84   ISColoring     iscoloring;
85 
86   PetscFunctionBeginUser;
87   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
88   PetscCall(PetscNew(&adctx));
89   PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
90   PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
91   appctx.aijpc = PETSC_FALSE, adctx->no_an = PETSC_FALSE, adctx->sparse = PETSC_FALSE, adctx->sparse_view = PETSC_FALSE;
92   adctx->sparse_view_done = PETSC_FALSE;
93   PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
94   PetscCall(PetscOptionsGetBool(NULL, NULL, "-no_annotation", &adctx->no_an, NULL));
95   PetscCall(PetscOptionsGetBool(NULL, NULL, "-jacobian_by_hand", &byhand, NULL));
96   PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse", &adctx->sparse, NULL));
97   PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse_view", &adctx->sparse_view, NULL));
98   appctx.D1    = 8.0e-5;
99   appctx.D2    = 4.0e-5;
100   appctx.gamma = .024;
101   appctx.kappa = .06;
102   appctx.adctx = adctx;
103 
104   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
105      Create distributed array (DMDA) to manage parallel grid and vectors
106   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
107   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
108   PetscCall(DMSetFromOptions(da));
109   PetscCall(DMSetUp(da));
110   PetscCall(DMDASetFieldName(da, 0, "u"));
111   PetscCall(DMDASetFieldName(da, 1, "v"));
112 
113   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
114      Extract global vectors from DMDA; then duplicate for remaining
115      vectors that are the same types
116    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117   PetscCall(DMCreateGlobalVector(da, &x));
118   PetscCall(VecDuplicate(x, &r));
119   PetscCall(VecDuplicate(x, &xdot));
120 
121   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
122      Create timestepping solver context
123      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
124   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
125   PetscCall(TSSetType(ts, TSCN));
126   PetscCall(TSSetDM(ts, da));
127   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
128   if (!implicitform) {
129     PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionPassive, &appctx));
130   } else {
131     PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocal)IFunctionLocalPassive, &appctx));
132   }
133 
134   if (!adctx->no_an) {
135     PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
136     adctx->m = dofs * gxm * gym;
137     adctx->n = dofs * gxm * gym; /* Number of dependent and independent variables */
138 
139     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140        Trace function(s) just once
141      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
142     if (!implicitform) {
143       PetscCall(RHSFunctionActive(ts, 1.0, x, r, &appctx));
144     } else {
145       PetscCall(IFunctionActive(ts, 1.0, x, xdot, r, &appctx));
146     }
147 
148     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149       In the case where ADOL-C generates the Jacobian in compressed format,
150       seed and recovery matrices are required. Since the sparsity structure
151       of the Jacobian does not change over the course of the time
152       integration, we can save computational effort by only generating
153       these objects once.
154        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
155     if (adctx->sparse) {
156       /*
157          Generate sparsity pattern
158 
159          One way to do this is to use the Jacobian pattern driver `jac_pat`
160          provided by ColPack. Otherwise, it is the responsibility of the user
161          to obtain the sparsity pattern.
162       */
163       PetscCall(PetscMalloc1(adctx->n, &u_vec));
164       JP = (unsigned int **)malloc(adctx->m * sizeof(unsigned int *));
165       jac_pat(1, adctx->m, adctx->n, u_vec, JP, ctrl);
166       if (adctx->sparse_view) PetscCall(PrintSparsity(MPI_COMM_WORLD, adctx->m, JP));
167 
168       /*
169         Extract a column colouring
170 
171         For problems using DMDA, colourings can be extracted directly, as
172         follows. There are also ColPack drivers available. Otherwise, it is the
173         responsibility of the user to obtain a suitable colouring.
174       */
175       PetscCall(DMCreateColoring(da, IS_COLORING_LOCAL, &iscoloring));
176       PetscCall(ISColoringGetIS(iscoloring, PETSC_USE_POINTER, &adctx->p, NULL));
177 
178       /* Generate seed matrix to propagate through the forward mode of AD */
179       PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
180       PetscCall(GenerateSeedMatrix(iscoloring, Seed));
181       PetscCall(ISColoringDestroy(&iscoloring));
182 
183       /*
184         Generate recovery matrix, which is used to recover the Jacobian from
185         compressed format */
186       PetscCall(AdolcMalloc2(adctx->m, adctx->p, &Rec));
187       PetscCall(GetRecoveryMatrix(Seed, JP, adctx->m, adctx->p, Rec));
188 
189       /* Store results and free workspace */
190       adctx->Rec = Rec;
191       for (i = 0; i < adctx->m; i++) free(JP[i]);
192       free(JP);
193       PetscCall(PetscFree(u_vec));
194 
195     } else {
196       /*
197         In 'full' Jacobian mode, propagate an identity matrix through the
198         forward mode of AD.
199       */
200       adctx->p = adctx->n;
201       PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
202       PetscCall(Identity(adctx->n, Seed));
203     }
204     adctx->Seed = Seed;
205   }
206 
207   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208      Set Jacobian
209    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
210   if (!implicitform) {
211     if (!byhand) {
212       PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianAdolc, &appctx));
213     } else {
214       PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianByHand, &appctx));
215     }
216   } else {
217     if (appctx.aijpc) {
218       Mat A, B;
219 
220       PetscCall(DMSetMatType(da, MATSELL));
221       PetscCall(DMCreateMatrix(da, &A));
222       PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
223       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
224       if (!byhand) {
225         PetscCall(TSSetIJacobian(ts, A, B, IJacobianAdolc, &appctx));
226       } else {
227         PetscCall(TSSetIJacobian(ts, A, B, IJacobianByHand, &appctx));
228       }
229       PetscCall(MatDestroy(&A));
230       PetscCall(MatDestroy(&B));
231     } else {
232       if (!byhand) {
233         PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianAdolc, &appctx));
234       } else {
235         PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianByHand, &appctx));
236       }
237     }
238   }
239 
240   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
241      Set initial conditions
242    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
243   PetscCall(InitialConditions(da, x));
244   PetscCall(TSSetSolution(ts, x));
245 
246   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
247     Have the TS save its trajectory so that TSAdjointSolve() may be used
248     and set solver options
249    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
250   if (!forwardonly) {
251     PetscCall(TSSetSaveTrajectory(ts));
252     PetscCall(TSSetMaxTime(ts, 200.0));
253     PetscCall(TSSetTimeStep(ts, 0.5));
254   } else {
255     PetscCall(TSSetMaxTime(ts, 2000.0));
256     PetscCall(TSSetTimeStep(ts, 10));
257   }
258   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
259   PetscCall(TSSetFromOptions(ts));
260 
261   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
262      Solve ODE system
263      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
264   PetscCall(TSSolve(ts, x));
265   if (!forwardonly) {
266     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
267        Start the Adjoint model
268        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
269     PetscCall(VecDuplicate(x, &lambda[0]));
270     /*   Reset initial conditions for the adjoint integration */
271     PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
272     PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
273     PetscCall(TSAdjointSolve(ts));
274     PetscCall(VecDestroy(&lambda[0]));
275   }
276 
277   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
278      Free work space.
279    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
280   PetscCall(VecDestroy(&xdot));
281   PetscCall(VecDestroy(&r));
282   PetscCall(VecDestroy(&x));
283   PetscCall(TSDestroy(&ts));
284   if (!adctx->no_an) {
285     if (adctx->sparse) PetscCall(AdolcFree2(Rec));
286     PetscCall(AdolcFree2(Seed));
287   }
288   PetscCall(DMDestroy(&da));
289   PetscCall(PetscFree(adctx));
290   PetscCall(PetscFinalize());
291   return 0;
292 }
293 
294 PetscErrorCode InitialConditions(DM da, Vec U)
295 {
296   PetscInt  i, j, xs, ys, xm, ym, Mx, My;
297   Field   **u;
298   PetscReal hx, hy, x, y;
299 
300   PetscFunctionBegin;
301   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
302 
303   hx = 2.5 / (PetscReal)Mx;
304   hy = 2.5 / (PetscReal)My;
305 
306   /*
307      Get pointers to vector data
308   */
309   PetscCall(DMDAVecGetArray(da, U, &u));
310 
311   /*
312      Get local grid boundaries
313   */
314   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
315 
316   /*
317      Compute function over the locally owned part of the grid
318   */
319   for (j = ys; j < ys + ym; j++) {
320     y = j * hy;
321     for (i = xs; i < xs + xm; i++) {
322       x = i * hx;
323       if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
324         u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
325       else u[j][i].v = 0.0;
326 
327       u[j][i].u = 1.0 - 2.0 * u[j][i].v;
328     }
329   }
330 
331   /*
332      Restore vectors
333   */
334   PetscCall(DMDAVecRestoreArray(da, U, &u));
335   PetscFunctionReturn(PETSC_SUCCESS);
336 }
337 
338 PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
339 {
340   PetscInt i, j, Mx, My, xs, ys, xm, ym;
341   Field  **l;
342   PetscFunctionBegin;
343 
344   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
345   /* locate the global i index for x and j index for y */
346   i = (PetscInt)(x * (Mx - 1));
347   j = (PetscInt)(y * (My - 1));
348   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
349 
350   if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
351     /* the i,j vertex is on this process */
352     PetscCall(DMDAVecGetArray(da, lambda, &l));
353     l[j][i].u = 1.0;
354     l[j][i].v = 1.0;
355     PetscCall(DMDAVecRestoreArray(da, lambda, &l));
356   }
357   PetscFunctionReturn(PETSC_SUCCESS);
358 }
359 
360 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr)
361 {
362   AppCtx     *appctx = (AppCtx *)ptr;
363   PetscInt    i, j, xs, ys, xm, ym;
364   PetscReal   hx, hy, sx, sy;
365   PetscScalar uc, uxx, uyy, vc, vxx, vyy;
366 
367   PetscFunctionBegin;
368   hx = 2.50 / (PetscReal)(info->mx);
369   sx = 1.0 / (hx * hx);
370   hy = 2.50 / (PetscReal)(info->my);
371   sy = 1.0 / (hy * hy);
372 
373   /* Get local grid boundaries */
374   xs = info->xs;
375   xm = info->xm;
376   ys = info->ys;
377   ym = info->ym;
378 
379   /* Compute function over the locally owned part of the grid */
380   for (j = ys; j < ys + ym; j++) {
381     for (i = xs; i < xs + xm; i++) {
382       uc        = u[j][i].u;
383       uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
384       uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
385       vc        = u[j][i].v;
386       vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
387       vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
388       f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
389       f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
390     }
391   }
392   PetscCall(PetscLogFlops(16.0 * xm * ym));
393   PetscFunctionReturn(PETSC_SUCCESS);
394 }
395 
/*
  Implicit-form residual F = Udot - RHS(U), evaluated with ADOL-C *active*
  arithmetic so that the whole computation is recorded on tape 1. The tape is
  later re-evaluated by the ADOL-C Jacobian drivers (see IJacobianAdolc).
  main() calls this exactly once, before time stepping, to create the tape.

  NOTE(review): Udot enters the residual passively (plain udot[j][i] values),
  so the tape presumably captures only the dependence on U; the mass/shift
  term is handled separately by the driver — confirm against
  PetscAdolcComputeIJacobianLocalIDMass in adolc-utils/drivers.cxx.
*/
PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
{
  AppCtx       *appctx = (AppCtx *)ptr;
  DM            da;
  DMDALocalInfo info;
  Field       **u, **f, **udot;
  Vec           localU;
  PetscInt      i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
  PetscReal     hx, hy, sx, sy;         /* mesh spacings and inverse-square stencil weights */
  adouble       uc, uxx, uyy, vc, vxx, vyy;
  AField      **f_a, *f_c, **u_a, *u_c; /* active counterparts of f and u */
  PetscScalar   dummy;                  /* sink for dependents at ghost points */

  PetscFunctionBegin;

  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetLocalVector(da, &localU));
  hx  = 2.50 / (PetscReal)(info.mx);
  sx  = 1.0 / (hx * hx);
  hy  = 2.50 / (PetscReal)(info.my);
  sy  = 1.0 / (hy * hy);
  /* Owned (xs/xm, ys/ym) and ghosted (gxs/gxm, gys/gym) index ranges */
  xs  = info.xs;
  xm  = info.xm;
  gxs = info.gxs;
  gxm = info.gxm;
  ys  = info.ys;
  ym  = info.ym;
  gys = info.gys;
  gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm * info.gym];
  f_c = new AField[info.gxm * info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[info.gym];
  f_a = new AField *[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  trace_on(1); /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later. Ghost-point dependents go to `dummy`; owned points write into F.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off(); /* End of active section */
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));

  PetscCall(DMRestoreLocalVector(da, &localU));

  /* Destroy AFields appropriately: shift the row-pointer arrays back by the
     ghost offset (presumably applied by GiveGhostPoints — confirm in
     adolc-utils) so that delete[] receives the pointers returned by new */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(PETSC_SUCCESS);
}
527 
528 PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
529 {
530   AppCtx     *appctx = (AppCtx *)ptr;
531   DM          da;
532   PetscInt    i, j, xs, ys, xm, ym, Mx, My;
533   PetscReal   hx, hy, sx, sy;
534   PetscScalar uc, uxx, uyy, vc, vxx, vyy;
535   Field     **u, **f;
536   Vec         localU, localF;
537 
538   PetscFunctionBegin;
539   PetscCall(TSGetDM(ts, &da));
540   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
541   hx = 2.50 / (PetscReal)(Mx);
542   sx = 1.0 / (hx * hx);
543   hy = 2.50 / (PetscReal)(My);
544   sy = 1.0 / (hy * hy);
545   PetscCall(DMGetLocalVector(da, &localU));
546   PetscCall(DMGetLocalVector(da, &localF));
547 
548   /*
549      Scatter ghost points to local vector,using the 2-step process
550         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
551      By placing code between these two statements, computations can be
552      done while messages are in transition.
553   */
554   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
555   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
556   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
557   PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
558   PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));
559 
560   /*
561      Get pointers to vector data
562   */
563   PetscCall(DMDAVecGetArrayRead(da, localU, &u));
564   PetscCall(DMDAVecGetArray(da, localF, &f));
565 
566   /*
567      Get local grid boundaries
568   */
569   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
570 
571   /*
572      Compute function over the locally owned part of the grid
573   */
574   for (j = ys; j < ys + ym; j++) {
575     for (i = xs; i < xs + xm; i++) {
576       uc        = u[j][i].u;
577       uxx       = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
578       uyy       = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
579       vc        = u[j][i].v;
580       vxx       = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
581       vyy       = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
582       f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
583       f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
584     }
585   }
586 
587   /*
588      Gather global vector, using the 2-step process
589         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
590 
591      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
592                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
593   */
594   PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
595   PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));
596 
597   /*
598      Restore vectors
599   */
600   PetscCall(DMDAVecRestoreArray(da, localF, &f));
601   PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
602   PetscCall(DMRestoreLocalVector(da, &localF));
603   PetscCall(DMRestoreLocalVector(da, &localU));
604   PetscCall(PetscLogFlops(16.0 * xm * ym));
605   PetscFunctionReturn(PETSC_SUCCESS);
606 }
607 
/*
  Explicit-form right-hand side F = RHS(U), evaluated with ADOL-C *active*
  arithmetic so the whole computation is recorded on tape 1. main() calls
  this exactly once, before time stepping, to create the tape that
  RHSJacobianAdolc later re-evaluates.
*/
PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
{
  AppCtx   *appctx = (AppCtx *)ptr;
  DM        da;
  PetscInt  i, j, xs, ys, xm, ym, gxs, gys, gxm, gym, Mx, My;
  PetscReal hx, hy, sx, sy;             /* mesh spacings and inverse-square stencil weights */
  AField  **f_a, *f_c, **u_a, *u_c;     /* active counterparts of f and u */
  adouble   uc, uxx, uyy, vc, vxx, vyy;
  Field   **u, **f;
  Vec       localU, localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)(Mx);
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)(My);
  sy = 1.0 / (hy * hy);
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMGetLocalVector(da, &localF));

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
  PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, localF, &f));

  /*
     Get local and ghosted grid boundaries
  */
  PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, NULL, &gxm, &gym, NULL));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[gxm * gym];
  f_c = new AField[gxm * gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField *[gym];
  f_a = new AField *[gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da, u_c, &u_a));
  PetscCall(GiveGhostPoints(da, f_c, &f_a));

  /*
     Compute function over the locally owned part of the grid
  */
  trace_on(1); // ----------------------------------------------- Start of active section

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /*
    Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
      uyy         = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
      vyy         = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
      f_a[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
      f_a[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
    }
  }
  /*
    Mark dependence

    NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
          during Jacobian assembly. (Unlike IFunctionActive, every ghosted point writes
          into the local array f here.)
  */
  for (j = gys; j < gys + gym; j++) {
    for (i = gxs; i < gxs + gxm; i++) {
      f_a[j][i].u >>= f[j][i].u;
      f_a[j][i].v >>= f[j][i].v;
    }
  }
  trace_off(); // ----------------------------------------------- End of active section

  /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
  //  if (appctx->adctx->zos) {
  //    PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
  //  }

  /*
     Gather global vector, using the 2-step process
        DMLocalToGlobalBegin(),DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
  PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, localF, &f));
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMRestoreLocalVector(da, &localF));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscCall(PetscLogFlops(16.0 * xm * ym));

  /* Destroy AFields appropriately: shift the row-pointer arrays back by the
     ghost offset (presumably applied by GiveGhostPoints — confirm in
     adolc-utils) so that delete[] receives the pointers returned by new */
  f_a += gys;
  u_a += gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(PETSC_SUCCESS);
}
752 
753 PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx)
754 {
755   AppCtx            *appctx = (AppCtx *)ctx;
756   DM                 da;
757   const PetscScalar *u_vec;
758   Vec                localU;
759 
760   PetscFunctionBegin;
761   PetscCall(TSGetDM(ts, &da));
762   PetscCall(DMGetLocalVector(da, &localU));
763 
764   /*
765      Scatter ghost points to local vector,using the 2-step process
766         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
767      By placing code between these two statements, computations can be
768      done while messages are in transition.
769   */
770   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
771   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
772 
773   /* Get pointers to vector data */
774   PetscCall(VecGetArrayRead(localU, &u_vec));
775 
776   /*
777     Compute Jacobian
778   */
779   PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1, A, u_vec, a, appctx->adctx));
780 
781   /*
782      Restore vectors
783   */
784   PetscCall(VecRestoreArrayRead(localU, &u_vec));
785   PetscCall(DMRestoreLocalVector(da, &localU));
786   PetscFunctionReturn(PETSC_SUCCESS);
787 }
788 
789 PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx)
790 {
791   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
792   DM          da;
793   PetscInt    i, j, Mx, My, xs, ys, xm, ym;
794   PetscReal   hx, hy, sx, sy;
795   PetscScalar uc, vc;
796   Field     **u;
797   Vec         localU;
798   MatStencil  stencil[6], rowstencil;
799   PetscScalar entries[6];
800 
801   PetscFunctionBegin;
802   PetscCall(TSGetDM(ts, &da));
803   PetscCall(DMGetLocalVector(da, &localU));
804   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
805 
806   hx = 2.50 / (PetscReal)Mx;
807   sx = 1.0 / (hx * hx);
808   hy = 2.50 / (PetscReal)My;
809   sy = 1.0 / (hy * hy);
810 
811   /*
812      Scatter ghost points to local vector,using the 2-step process
813         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
814      By placing code between these two statements, computations can be
815      done while messages are in transition.
816   */
817   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
818   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
819 
820   /*
821      Get pointers to vector data
822   */
823   PetscCall(DMDAVecGetArrayRead(da, localU, &u));
824 
825   /*
826      Get local grid boundaries
827   */
828   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
829 
830   stencil[0].k = 0;
831   stencil[1].k = 0;
832   stencil[2].k = 0;
833   stencil[3].k = 0;
834   stencil[4].k = 0;
835   stencil[5].k = 0;
836   rowstencil.k = 0;
837   /*
838      Compute function over the locally owned part of the grid
839   */
840   for (j = ys; j < ys + ym; j++) {
841     stencil[0].j = j - 1;
842     stencil[1].j = j + 1;
843     stencil[2].j = j;
844     stencil[3].j = j;
845     stencil[4].j = j;
846     stencil[5].j = j;
847     rowstencil.k = 0;
848     rowstencil.j = j;
849     for (i = xs; i < xs + xm; i++) {
850       uc = u[j][i].u;
851       vc = u[j][i].v;
852 
853       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
854       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
855 
856       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
857       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
858        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
859 
860       stencil[0].i = i;
861       stencil[0].c = 0;
862       entries[0]   = -appctx->D1 * sy;
863       stencil[1].i = i;
864       stencil[1].c = 0;
865       entries[1]   = -appctx->D1 * sy;
866       stencil[2].i = i - 1;
867       stencil[2].c = 0;
868       entries[2]   = -appctx->D1 * sx;
869       stencil[3].i = i + 1;
870       stencil[3].c = 0;
871       entries[3]   = -appctx->D1 * sx;
872       stencil[4].i = i;
873       stencil[4].c = 0;
874       entries[4]   = 2.0 * appctx->D1 * (sx + sy) + vc * vc + appctx->gamma + a;
875       stencil[5].i = i;
876       stencil[5].c = 1;
877       entries[5]   = 2.0 * uc * vc;
878       rowstencil.i = i;
879       rowstencil.c = 0;
880 
881       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
882       if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
883       stencil[0].c = 1;
884       entries[0]   = -appctx->D2 * sy;
885       stencil[1].c = 1;
886       entries[1]   = -appctx->D2 * sy;
887       stencil[2].c = 1;
888       entries[2]   = -appctx->D2 * sx;
889       stencil[3].c = 1;
890       entries[3]   = -appctx->D2 * sx;
891       stencil[4].c = 1;
892       entries[4]   = 2.0 * appctx->D2 * (sx + sy) - 2.0 * uc * vc + appctx->gamma + appctx->kappa + a;
893       stencil[5].c = 0;
894       entries[5]   = -vc * vc;
895 
896       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
897       if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
898       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
899     }
900   }
901 
902   /*
903      Restore vectors
904   */
905   PetscCall(PetscLogFlops(19.0 * xm * ym));
906   PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
907   PetscCall(DMRestoreLocalVector(da, &localU));
908   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
909   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
910   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
911   if (appctx->aijpc) {
912     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
913     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
914     PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
915   }
916   PetscFunctionReturn(PETSC_SUCCESS);
917 }
918 
919 PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx)
920 {
921   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
922   DM          da;
923   PetscInt    i, j, Mx, My, xs, ys, xm, ym;
924   PetscReal   hx, hy, sx, sy;
925   PetscScalar uc, vc;
926   Field     **u;
927   Vec         localU;
928   MatStencil  stencil[6], rowstencil;
929   PetscScalar entries[6];
930 
931   PetscFunctionBegin;
932   PetscCall(TSGetDM(ts, &da));
933   PetscCall(DMGetLocalVector(da, &localU));
934   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
935 
936   hx = 2.50 / (PetscReal)(Mx);
937   sx = 1.0 / (hx * hx);
938   hy = 2.50 / (PetscReal)(My);
939   sy = 1.0 / (hy * hy);
940 
941   /*
942      Scatter ghost points to local vector,using the 2-step process
943         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
944      By placing code between these two statements, computations can be
945      done while messages are in transition.
946   */
947   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
948   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
949 
950   /*
951      Get pointers to vector data
952   */
953   PetscCall(DMDAVecGetArrayRead(da, localU, &u));
954 
955   /*
956      Get local grid boundaries
957   */
958   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
959 
960   stencil[0].k = 0;
961   stencil[1].k = 0;
962   stencil[2].k = 0;
963   stencil[3].k = 0;
964   stencil[4].k = 0;
965   stencil[5].k = 0;
966   rowstencil.k = 0;
967 
968   /*
969      Compute function over the locally owned part of the grid
970   */
971   for (j = ys; j < ys + ym; j++) {
972     stencil[0].j = j - 1;
973     stencil[1].j = j + 1;
974     stencil[2].j = j;
975     stencil[3].j = j;
976     stencil[4].j = j;
977     stencil[5].j = j;
978     rowstencil.k = 0;
979     rowstencil.j = j;
980     for (i = xs; i < xs + xm; i++) {
981       uc = u[j][i].u;
982       vc = u[j][i].v;
983 
984       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
985       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
986 
987       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
988       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
989        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
990 
991       stencil[0].i = i;
992       stencil[0].c = 0;
993       entries[0]   = appctx->D1 * sy;
994       stencil[1].i = i;
995       stencil[1].c = 0;
996       entries[1]   = appctx->D1 * sy;
997       stencil[2].i = i - 1;
998       stencil[2].c = 0;
999       entries[2]   = appctx->D1 * sx;
1000       stencil[3].i = i + 1;
1001       stencil[3].c = 0;
1002       entries[3]   = appctx->D1 * sx;
1003       stencil[4].i = i;
1004       stencil[4].c = 0;
1005       entries[4]   = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma;
1006       stencil[5].i = i;
1007       stencil[5].c = 1;
1008       entries[5]   = -2.0 * uc * vc;
1009       rowstencil.i = i;
1010       rowstencil.c = 0;
1011 
1012       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
1013 
1014       stencil[0].c = 1;
1015       entries[0]   = appctx->D2 * sy;
1016       stencil[1].c = 1;
1017       entries[1]   = appctx->D2 * sy;
1018       stencil[2].c = 1;
1019       entries[2]   = appctx->D2 * sx;
1020       stencil[3].c = 1;
1021       entries[3]   = appctx->D2 * sx;
1022       stencil[4].c = 1;
1023       entries[4]   = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa;
1024       stencil[5].c = 0;
1025       entries[5]   = vc * vc;
1026       rowstencil.c = 1;
1027 
1028       PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
1029       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
1030     }
1031   }
1032 
1033   /*
1034      Restore vectors
1035   */
1036   PetscCall(PetscLogFlops(19.0 * xm * ym));
1037   PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
1038   PetscCall(DMRestoreLocalVector(da, &localU));
1039   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1040   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1041   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
1042   if (appctx->aijpc) {
1043     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
1044     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
1045     PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
1046   }
1047   PetscFunctionReturn(PETSC_SUCCESS);
1048 }
1049 
1050 PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx)
1051 {
1052   AppCtx      *appctx = (AppCtx *)ctx;
1053   DM           da;
1054   PetscScalar *u_vec;
1055   Vec          localU;
1056 
1057   PetscFunctionBegin;
1058   PetscCall(TSGetDM(ts, &da));
1059   PetscCall(DMGetLocalVector(da, &localU));
1060 
1061   /*
1062      Scatter ghost points to local vector,using the 2-step process
1063         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
1064      By placing code between these two statements, computations can be
1065      done while messages are in transition.
1066   */
1067   PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
1068   PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
1069 
1070   /* Get pointers to vector data */
1071   PetscCall(VecGetArray(localU, &u_vec));
1072 
1073   /*
1074     Compute Jacobian
1075   */
1076   PetscCall(PetscAdolcComputeRHSJacobianLocal(1, A, u_vec, appctx->adctx));
1077 
1078   /*
1079      Restore vectors
1080   */
1081   PetscCall(VecRestoreArray(localU, &u_vec));
1082   PetscCall(DMRestoreLocalVector(da, &localU));
1083   PetscFunctionReturn(PETSC_SUCCESS);
1084 }
1085 
1086 /*TEST
1087 
1088   build:
1089     requires: double !complex adolc colpack
1090 
1091   test:
1092     suffix: 1
1093     nsize: 1
1094     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1095     output_file: output/adr_ex5adj_1.out
1096 
1097   test:
1098     suffix: 2
1099     nsize: 1
1100     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1101     output_file: output/adr_ex5adj_2.out
1102 
1103   test:
1104     suffix: 3
1105     nsize: 4
1106     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1107     output_file: output/adr_ex5adj_3.out
1108 
1109   test:
1110     suffix: 4
1111     nsize: 4
1112     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1113     output_file: output/adr_ex5adj_4.out
1114 
1115   testset:
1116     suffix: 5
1117     nsize: 4
1118     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1119     output_file: output/adr_ex5adj_5.out
1120 
1121   testset:
1122     suffix: 6
1123     nsize: 4
1124     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1125     output_file: output/adr_ex5adj_6.out
1126 
1127 TEST*/
1128