xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj.cxx (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4    REQUIRES configuration of PETSc with option --download-adolc.
5 
6    For documentation on ADOL-C, see
7      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
8 
9    REQUIRES configuration of PETSc with option --download-colpack
10 
11    For documentation on ColPack, see
12      $PETSC_ARCH/externalpackages/git.colpack/README.md
13 */
14 /* ------------------------------------------------------------------------
15   See ../advection-diffusion-reaction/ex5 for a description of the problem
16   ------------------------------------------------------------------------- */
17 
18 /*
19   Runtime options:
20 
21     Solver:
22       -forwardonly       - Run the forward simulation without adjoint.
23       -implicitform      - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used.
24       -aijpc             - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
25 
26     Jacobian generation:
27       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
28       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
29       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
30       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
31  */
32 /*
33   NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are
34         identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples
35         of 5, in order for the 5-point stencil to be cleanly parallelised.
36 */
37 
38 #include <petscdmda.h>
39 #include <petscts.h>
40 #include "adolc-utils/drivers.cxx"
41 #include <adolc/adolc.h>
42 
/* (Passive) field for the two variables: activator u and inhibitor v */
typedef struct {
  PetscScalar u,v;
} Field;

/* Active field for the two variables: adouble entries are recorded on the ADOL-C tape */
typedef struct {
  adouble u,v;
} AField;

/* Application context, passed to all TS callbacks through the `void *ptr` argument */
typedef struct {
  PetscReal D1,D2,gamma,kappa; /* diffusion coefficients and reaction parameters */
  AField    **u_a,**f_a;       /* active work arrays for state and residual */
  PetscBool aijpc;             /* if true, use an AIJ preconditioner matrix */
  AdolcCtx  *adctx; /* Automatic differentiation support */
} AppCtx;

/* Forward declarations of the callbacks defined later in this file */
extern PetscErrorCode InitialConditions(DM da,Vec U);
extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
extern PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
extern PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
extern PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
extern PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
extern PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
extern PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
71 
72 int main(int argc,char **argv)
73 {
74   TS             ts;
75   Vec            x,r,xdot;
76   DM             da;
77   AppCtx         appctx;
78   AdolcCtx       *adctx;
79   Vec            lambda[1];
80   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_FALSE,byhand=PETSC_FALSE;
81   PetscInt       gxm,gym,i,dofs = 2,ctrl[3] = {0,0,0};
82   PetscScalar    **Seed = NULL,**Rec = NULL,*u_vec;
83   unsigned int   **JP = NULL;
84   ISColoring     iscoloring;
85 
86   PetscFunctionBeginUser;
87   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
88   PetscCall(PetscNew(&adctx));
89   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
90   PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
91   appctx.aijpc = PETSC_FALSE,adctx->no_an = PETSC_FALSE,adctx->sparse = PETSC_FALSE,adctx->sparse_view = PETSC_FALSE;adctx->sparse_view_done = PETSC_FALSE;
92   PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
93   PetscCall(PetscOptionsGetBool(NULL,NULL,"-no_annotation",&adctx->no_an,NULL));
94   PetscCall(PetscOptionsGetBool(NULL,NULL,"-jacobian_by_hand",&byhand,NULL));
95   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse",&adctx->sparse,NULL));
96   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse_view",&adctx->sparse_view,NULL));
97   appctx.D1    = 8.0e-5;
98   appctx.D2    = 4.0e-5;
99   appctx.gamma = .024;
100   appctx.kappa = .06;
101   appctx.adctx = adctx;
102 
103   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
104      Create distributed array (DMDA) to manage parallel grid and vectors
105   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
106   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
107   PetscCall(DMSetFromOptions(da));
108   PetscCall(DMSetUp(da));
109   PetscCall(DMDASetFieldName(da,0,"u"));
110   PetscCall(DMDASetFieldName(da,1,"v"));
111 
112   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
113      Extract global vectors from DMDA; then duplicate for remaining
114      vectors that are the same types
115    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
116   PetscCall(DMCreateGlobalVector(da,&x));
117   PetscCall(VecDuplicate(x,&r));
118   PetscCall(VecDuplicate(x,&xdot));
119 
120   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121      Create timestepping solver context
122      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
124   PetscCall(TSSetType(ts,TSCN));
125   PetscCall(TSSetDM(ts,da));
126   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
127   if (!implicitform) {
128     PetscCall(TSSetRHSFunction(ts,NULL,RHSFunctionPassive,&appctx));
129   } else {
130     PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
131   }
132 
133   if (!adctx->no_an) {
134     PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
135     adctx->m = dofs*gxm*gym;adctx->n = dofs*gxm*gym; /* Number of dependent and independent variables */
136 
137     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138        Trace function(s) just once
139      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
140     if (!implicitform) {
141       PetscCall(RHSFunctionActive(ts,1.0,x,r,&appctx));
142     } else {
143       PetscCall(IFunctionActive(ts,1.0,x,xdot,r,&appctx));
144     }
145 
146     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147       In the case where ADOL-C generates the Jacobian in compressed format,
148       seed and recovery matrices are required. Since the sparsity structure
149       of the Jacobian does not change over the course of the time
150       integration, we can save computational effort by only generating
151       these objects once.
152        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
153     if (adctx->sparse) {
154       /*
155          Generate sparsity pattern
156 
157          One way to do this is to use the Jacobian pattern driver `jac_pat`
158          provided by ColPack. Otherwise, it is the responsibility of the user
159          to obtain the sparsity pattern.
160       */
161       PetscCall(PetscMalloc1(adctx->n,&u_vec));
162       JP = (unsigned int **) malloc(adctx->m*sizeof(unsigned int*));
163       jac_pat(1,adctx->m,adctx->n,u_vec,JP,ctrl);
164       if (adctx->sparse_view) {
165         PetscCall(PrintSparsity(MPI_COMM_WORLD,adctx->m,JP));
166       }
167 
168       /*
169         Extract a column colouring
170 
171         For problems using DMDA, colourings can be extracted directly, as
172         follows. There are also ColPack drivers available. Otherwise, it is the
173         responsibility of the user to obtain a suitable colouring.
174       */
175       PetscCall(DMCreateColoring(da,IS_COLORING_LOCAL,&iscoloring));
176       PetscCall(ISColoringGetIS(iscoloring,PETSC_USE_POINTER,&adctx->p,NULL));
177 
178       /* Generate seed matrix to propagate through the forward mode of AD */
179       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
180       PetscCall(GenerateSeedMatrix(iscoloring,Seed));
181       PetscCall(ISColoringDestroy(&iscoloring));
182 
183       /*
184         Generate recovery matrix, which is used to recover the Jacobian from
185         compressed format */
186       PetscCall(AdolcMalloc2(adctx->m,adctx->p,&Rec));
187       PetscCall(GetRecoveryMatrix(Seed,JP,adctx->m,adctx->p,Rec));
188 
189       /* Store results and free workspace */
190       adctx->Rec = Rec;
191       for (i=0;i<adctx->m;i++)
192         free(JP[i]);
193       free(JP);
194       PetscCall(PetscFree(u_vec));
195 
196     } else {
197 
198       /*
199         In 'full' Jacobian mode, propagate an identity matrix through the
200         forward mode of AD.
201       */
202       adctx->p = adctx->n;
203       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
204       PetscCall(Identity(adctx->n,Seed));
205     }
206     adctx->Seed = Seed;
207   }
208 
209   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210      Set Jacobian
211    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
212   if (!implicitform) {
213     if (!byhand) {
214       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianAdolc,&appctx));
215     } else {
216       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianByHand,&appctx));
217     }
218   } else {
219     if (appctx.aijpc) {
220       Mat A,B;
221 
222       PetscCall(DMSetMatType(da,MATSELL));
223       PetscCall(DMCreateMatrix(da,&A));
224       PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
225       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
226       if (!byhand) {
227         PetscCall(TSSetIJacobian(ts,A,B,IJacobianAdolc,&appctx));
228       } else {
229         PetscCall(TSSetIJacobian(ts,A,B,IJacobianByHand,&appctx));
230       }
231       PetscCall(MatDestroy(&A));
232       PetscCall(MatDestroy(&B));
233     } else {
234       if (!byhand) {
235         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianAdolc,&appctx));
236       } else {
237         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianByHand,&appctx));
238       }
239     }
240   }
241 
242   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
243      Set initial conditions
244    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
245   PetscCall(InitialConditions(da,x));
246   PetscCall(TSSetSolution(ts,x));
247 
248   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
249     Have the TS save its trajectory so that TSAdjointSolve() may be used
250     and set solver options
251    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
252   if (!forwardonly) {
253     PetscCall(TSSetSaveTrajectory(ts));
254     PetscCall(TSSetMaxTime(ts,200.0));
255     PetscCall(TSSetTimeStep(ts,0.5));
256   } else {
257     PetscCall(TSSetMaxTime(ts,2000.0));
258     PetscCall(TSSetTimeStep(ts,10));
259   }
260   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
261   PetscCall(TSSetFromOptions(ts));
262 
263   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
264      Solve ODE system
265      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
266   PetscCall(TSSolve(ts,x));
267   if (!forwardonly) {
268     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
269        Start the Adjoint model
270        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
271     PetscCall(VecDuplicate(x,&lambda[0]));
272     /*   Reset initial conditions for the adjoint integration */
273     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
274     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
275     PetscCall(TSAdjointSolve(ts));
276     PetscCall(VecDestroy(&lambda[0]));
277   }
278 
279   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
280      Free work space.
281    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
282   PetscCall(VecDestroy(&xdot));
283   PetscCall(VecDestroy(&r));
284   PetscCall(VecDestroy(&x));
285   PetscCall(TSDestroy(&ts));
286   if (!adctx->no_an) {
287     if (adctx->sparse) PetscCall(AdolcFree2(Rec));
288     PetscCall(AdolcFree2(Seed));
289   }
290   PetscCall(DMDestroy(&da));
291   PetscCall(PetscFree(adctx));
292   PetscCall(PetscFinalize());
293   return 0;
294 }
295 
296 PetscErrorCode InitialConditions(DM da,Vec U)
297 {
298   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
299   Field          **u;
300   PetscReal      hx,hy,x,y;
301 
302   PetscFunctionBegin;
303   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
304 
305   hx = 2.5/(PetscReal)Mx;
306   hy = 2.5/(PetscReal)My;
307 
308   /*
309      Get pointers to vector data
310   */
311   PetscCall(DMDAVecGetArray(da,U,&u));
312 
313   /*
314      Get local grid boundaries
315   */
316   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
317 
318   /*
319      Compute function over the locally owned part of the grid
320   */
321   for (j=ys; j<ys+ym; j++) {
322     y = j*hy;
323     for (i=xs; i<xs+xm; i++) {
324       x = i*hx;
325       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
326       else u[j][i].v = 0.0;
327 
328       u[j][i].u = 1.0 - 2.0*u[j][i].v;
329     }
330   }
331 
332   /*
333      Restore vectors
334   */
335   PetscCall(DMDAVecRestoreArray(da,U,&u));
336   PetscFunctionReturn(0);
337 }
338 
339 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
340 {
341    PetscInt i,j,Mx,My,xs,ys,xm,ym;
342    Field **l;
343    PetscFunctionBegin;
344 
345    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
346    /* locate the global i index for x and j index for y */
347    i = (PetscInt)(x*(Mx-1));
348    j = (PetscInt)(y*(My-1));
349    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
350 
351    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
352      /* the i,j vertex is on this process */
353      PetscCall(DMDAVecGetArray(da,lambda,&l));
354      l[j][i].u = 1.0;
355      l[j][i].v = 1.0;
356      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
357    }
358    PetscFunctionReturn(0);
359 }
360 
361 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
362 {
363   AppCtx         *appctx = (AppCtx*)ptr;
364   PetscInt       i,j,xs,ys,xm,ym;
365   PetscReal      hx,hy,sx,sy;
366   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
367 
368   PetscFunctionBegin;
369   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
370   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
371 
372   /* Get local grid boundaries */
373   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
374 
375   /* Compute function over the locally owned part of the grid */
376   for (j=ys; j<ys+ym; j++) {
377     for (i=xs; i<xs+xm; i++) {
378       uc        = u[j][i].u;
379       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
380       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
381       vc        = u[j][i].v;
382       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
383       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
384       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
385       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
386     }
387   }
388   PetscCall(PetscLogFlops(16.0*xm*ym));
389   PetscFunctionReturn(0);
390 }
391 
/*
  Active (ADOL-C traced) version of the implicit residual. Called once from main()
  to record the residual evaluation on tape 1; the tape is later replayed by the
  Jacobian drivers. All ghosted state points are marked independent and all
  ghosted residual points dependent, so the taped Jacobian indexing matches the
  ghosted local numbering.
*/
PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  DMDALocalInfo  info;
  Field          **u,**f,**udot;
  Vec            localU;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      hx,hy,sx,sy;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  AField         **f_a,*f_c,**u_a,*u_c;
  PetscScalar    dummy;  /* sink for dependent values at ghost points */

  PetscFunctionBegin;

  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetLocalVector(da,&localU));
  /* Inverse squared grid spacings for the 5-point Laplacian */
  hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  /* Owned (xs..) and ghosted (gxs..) index ranges */
  xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,F,&f));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm*info.gym];
  f_c = new AField[info.gxm*info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[info.gym];
  f_a = new AField*[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  trace_on(1);  /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u_a[j][i].u;
      uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc        = u_a[j][i].v;
      vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later. Ghost-point outputs go into `dummy`; owned outputs go into F.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off();  /* End of active section */
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));

  PetscCall(DMRestoreLocalVector(da,&localU));

  /* Destroy AFields appropriately: undo the ghost-point offset (presumably applied
     by GiveGhostPoints above — verify against adolc-utils) so that delete[] receives
     the pointers originally returned by new[] */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}
515 
516 PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
517 {
518   AppCtx         *appctx = (AppCtx*)ptr;
519   DM             da;
520   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
521   PetscReal      hx,hy,sx,sy;
522   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
523   Field          **u,**f;
524   Vec            localU,localF;
525 
526   PetscFunctionBegin;
527   PetscCall(TSGetDM(ts,&da));
528   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
529   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
530   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
531   PetscCall(DMGetLocalVector(da,&localU));
532   PetscCall(DMGetLocalVector(da,&localF));
533 
534   /*
535      Scatter ghost points to local vector,using the 2-step process
536         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
537      By placing code between these two statements, computations can be
538      done while messages are in transition.
539   */
540   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
541   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
542   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
543   PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
544   PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));
545 
546   /*
547      Get pointers to vector data
548   */
549   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
550   PetscCall(DMDAVecGetArray(da,localF,&f));
551 
552   /*
553      Get local grid boundaries
554   */
555   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
556 
557   /*
558      Compute function over the locally owned part of the grid
559   */
560   for (j=ys; j<ys+ym; j++) {
561     for (i=xs; i<xs+xm; i++) {
562       uc        = u[j][i].u;
563       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
564       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
565       vc        = u[j][i].v;
566       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
567       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
568       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
569       f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
570     }
571   }
572 
573   /*
574      Gather global vector, using the 2-step process
575         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
576 
577      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
578                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
579   */
580   PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
581   PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));
582 
583   /*
584      Restore vectors
585   */
586   PetscCall(DMDAVecRestoreArray(da,localF,&f));
587   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
588   PetscCall(DMRestoreLocalVector(da,&localF));
589   PetscCall(DMRestoreLocalVector(da,&localU));
590   PetscCall(PetscLogFlops(16.0*xm*ym));
591   PetscFunctionReturn(0);
592 }
593 
/*
  Active (ADOL-C traced) version of the right-hand side G(U). Called once from
  main() to record the evaluation on tape 1 for later Jacobian replay. All
  ghosted state points are marked independent and all ghosted residual points
  dependent, so Jacobian indexing follows the ghosted local numbering.
*/
PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym,Mx,My;
  PetscReal      hx,hy,sx,sy;
  AField         **f_a,*f_c,**u_a,*u_c;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  Field          **u,**f;
  Vec            localU,localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* Inverse squared grid spacings for the 5-point Laplacian */
  hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMGetLocalVector(da,&localF));

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
  PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,localF,&f));

  /*
     Get local and ghosted grid boundaries
  */
  PetscCall(DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL));
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[gxm*gym];
  f_c = new AField[gxm*gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[gym];
  f_a = new AField*[gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  /*
     Compute function over the locally owned part of the grid
  */
  trace_on(1);  // ----------------------------------------------- Start of active section

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /*
    Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy         = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy         = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
      f_a[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
    }
  }
  /*
    Mark dependence

    NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
          during Jacobian assembly.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      f_a[j][i].u >>= f[j][i].u;
      f_a[j][i].v >>= f[j][i].v;
    }
  }
  trace_off();  // ----------------------------------------------- End of active section

  /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
//  if (appctx->adctx->zos) {
//    PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
//  }

  /*
     Gather global vector, using the 2-step process
        DMLocalToGlobalBegin(),DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da,localF,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Destroy AFields appropriately: undo the ghost-point offset (presumably applied
     by GiveGhostPoints above — verify against adolc-utils) so that delete[] receives
     the pointers originally returned by new[] */
  f_a += gys;
  u_a += gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}
736 
737 PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
738 {
739   AppCtx            *appctx = (AppCtx*)ctx;
740   DM                da;
741   const PetscScalar *u_vec;
742   Vec               localU;
743 
744   PetscFunctionBegin;
745   PetscCall(TSGetDM(ts,&da));
746   PetscCall(DMGetLocalVector(da,&localU));
747 
748   /*
749      Scatter ghost points to local vector,using the 2-step process
750         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
751      By placing code between these two statements, computations can be
752      done while messages are in transition.
753   */
754   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
755   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
756 
757   /* Get pointers to vector data */
758   PetscCall(VecGetArrayRead(localU,&u_vec));
759 
760   /*
761     Compute Jacobian
762   */
763   PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1,A,u_vec,a,appctx->adctx));
764 
765   /*
766      Restore vectors
767   */
768   PetscCall(VecRestoreArrayRead(localU,&u_vec));
769   PetscCall(DMRestoreLocalVector(da,&localU));
770   PetscFunctionReturn(0);
771 }
772 
773 PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
774 {
775   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
776   DM             da;
777   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
778   PetscReal      hx,hy,sx,sy;
779   PetscScalar    uc,vc;
780   Field          **u;
781   Vec            localU;
782   MatStencil     stencil[6],rowstencil;
783   PetscScalar    entries[6];
784 
785   PetscFunctionBegin;
786   PetscCall(TSGetDM(ts,&da));
787   PetscCall(DMGetLocalVector(da,&localU));
788   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
789 
790   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
791   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
792 
793   /*
794      Scatter ghost points to local vector,using the 2-step process
795         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
796      By placing code between these two statements, computations can be
797      done while messages are in transition.
798   */
799   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
800   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
801 
802   /*
803      Get pointers to vector data
804   */
805   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
806 
807   /*
808      Get local grid boundaries
809   */
810   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
811 
812   stencil[0].k = 0;
813   stencil[1].k = 0;
814   stencil[2].k = 0;
815   stencil[3].k = 0;
816   stencil[4].k = 0;
817   stencil[5].k = 0;
818   rowstencil.k = 0;
819   /*
820      Compute function over the locally owned part of the grid
821   */
822   for (j=ys; j<ys+ym; j++) {
823 
824     stencil[0].j = j-1;
825     stencil[1].j = j+1;
826     stencil[2].j = j;
827     stencil[3].j = j;
828     stencil[4].j = j;
829     stencil[5].j = j;
830     rowstencil.k = 0; rowstencil.j = j;
831     for (i=xs; i<xs+xm; i++) {
832       uc = u[j][i].u;
833       vc = u[j][i].v;
834 
835       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
836       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
837 
838       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
839       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
840        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
841 
842       stencil[0].i = i; stencil[0].c = 0; entries[0] = -appctx->D1*sy;
843       stencil[1].i = i; stencil[1].c = 0; entries[1] = -appctx->D1*sy;
844       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = -appctx->D1*sx;
845       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = -appctx->D1*sx;
846       stencil[4].i = i; stencil[4].c = 0; entries[4] = 2.0*appctx->D1*(sx + sy) + vc*vc + appctx->gamma + a;
847       stencil[5].i = i; stencil[5].c = 1; entries[5] = 2.0*uc*vc;
848       rowstencil.i = i; rowstencil.c = 0;
849 
850       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
851       if (appctx->aijpc) {
852         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
853       }
854       stencil[0].c = 1; entries[0] = -appctx->D2*sy;
855       stencil[1].c = 1; entries[1] = -appctx->D2*sy;
856       stencil[2].c = 1; entries[2] = -appctx->D2*sx;
857       stencil[3].c = 1; entries[3] = -appctx->D2*sx;
858       stencil[4].c = 1; entries[4] = 2.0*appctx->D2*(sx + sy) - 2.0*uc*vc + appctx->gamma + appctx->kappa + a;
859       stencil[5].c = 0; entries[5] = -vc*vc;
860 
861       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
862       if (appctx->aijpc) {
863         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
864       }
865       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
866     }
867   }
868 
869   /*
870      Restore vectors
871   */
872   PetscCall(PetscLogFlops(19.0*xm*ym));
873   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
874   PetscCall(DMRestoreLocalVector(da,&localU));
875   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
876   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
877   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
878   if (appctx->aijpc) {
879     PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
880     PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
881     PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
882   }
883   PetscFunctionReturn(0);
884 }
885 
/*
  Hand-coded Jacobian dF/dU of the RHS function for the explicit (RHSFunction)
  formulation of the reaction-diffusion system. For each grid node it inserts two
  6-entry rows (one per field component) combining the 5-point diffusion stencil
  with the local reaction terms.

  Input Parameters:
    ts  - the TS context
    t   - current time (unused)
    U   - global solution at which to evaluate the Jacobian
    ctx - AppCtx providing D1, D2, gamma, kappa and the aijpc flag

  Output Parameters:
    A - Jacobian matrix
    B - preconditioner matrix (only assembled here; see NOTE below)
*/
PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
{
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,vc;
  Field          **u;
  Vec            localU;
  MatStencil     stencil[6],rowstencil;
  PetscScalar    entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  /* Mesh spacings on the 2.5 x 2.5 domain and the 1/h^2 finite-difference factors */
  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* 2D problem: every stencil point lies in the k = 0 plane */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {

    /* 5-point stencil in (i,j) plus the cross-species coupling at the node itself */
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* u-equation row (c = 0): diffusion off-diagonals D1*s{x,y}, diagonal
         -2*D1*(sx+sy) - v^2 - gamma, and coupling d f_u/d v = -2*u*v */
      stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i; rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));

      /* v-equation row (c = 1): diffusion with D2, diagonal includes +2*u*v - gamma
         - kappa, and coupling d f_v/d u = v^2 */
      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0*xm*ym));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  /* NOTE(review): when -aijpc is set, B is assembled here but no values were ever
     inserted into it in the loop above (unlike IJacobianByHand, which fills B).
     Confirm whether B should also receive the stencil entries. */
  if (appctx->aijpc) {
    PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}
995 
996 PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
997 {
998   AppCtx         *appctx = (AppCtx*)ctx;
999   DM             da;
1000   PetscScalar    *u_vec;
1001   Vec            localU;
1002 
1003   PetscFunctionBegin;
1004   PetscCall(TSGetDM(ts,&da));
1005   PetscCall(DMGetLocalVector(da,&localU));
1006 
1007   /*
1008      Scatter ghost points to local vector,using the 2-step process
1009         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
1010      By placing code between these two statements, computations can be
1011      done while messages are in transition.
1012   */
1013   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
1014   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
1015 
1016   /* Get pointers to vector data */
1017   PetscCall(VecGetArray(localU,&u_vec));
1018 
1019   /*
1020     Compute Jacobian
1021   */
1022   PetscCall(PetscAdolcComputeRHSJacobianLocal(1,A,u_vec,appctx->adctx));
1023 
1024   /*
1025      Restore vectors
1026   */
1027   PetscCall(VecRestoreArray(localU,&u_vec));
1028   PetscCall(DMRestoreLocalVector(da,&localU));
1029   PetscFunctionReturn(0);
1030 }
1031 
1032 /*TEST
1033 
1034   build:
1035     requires: double !complex adolc colpack
1036 
1037   test:
1038     suffix: 1
1039     nsize: 1
1040     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1041     output_file: output/adr_ex5adj_1.out
1042 
1043   test:
1044     suffix: 2
1045     nsize: 1
1046     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1047     output_file: output/adr_ex5adj_2.out
1048 
1049   test:
1050     suffix: 3
1051     nsize: 4
1052     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1053     output_file: output/adr_ex5adj_3.out
1054 
1055   test:
1056     suffix: 4
1057     nsize: 4
1058     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1059     output_file: output/adr_ex5adj_4.out
1060 
1061   testset:
1062     suffix: 5
1063     nsize: 4
1064     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1065     output_file: output/adr_ex5adj_5.out
1066 
1067   testset:
1068     suffix: 6
1069     nsize: 4
1070     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1071     output_file: output/adr_ex5adj_6.out
1072 
1073 TEST*/
1074