xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj.cxx (revision c3b5f7ba6bc5ce25a01a67bb37ba5d34b02bbbd7)
/* Help string printed by PetscInitialize() when the program is run with -help */
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4    REQUIRES configuration of PETSc with option --download-adolc.
5 
6    For documentation on ADOL-C, see
7      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
8 
9    REQUIRES configuration of PETSc with option --download-colpack
10 
11    For documentation on ColPack, see
12      $PETSC_ARCH/externalpackages/git.colpack/README.md
13 */
14 /* ------------------------------------------------------------------------
15   See ../advection-diffusion-reaction/ex5 for a description of the problem
16   ------------------------------------------------------------------------- */
17 
18 /*
19   Runtime options:
20 
21     Solver:
22       -forwardonly       - Run the forward simulation without adjoint.
23       -implicitform      - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used.
24       -aijpc             - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
25 
26     Jacobian generation:
27       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
28       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
29       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
30       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
31  */
32 /*
33   NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are
34         identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples
35         of 5, in order for the 5-point stencil to be cleanly parallelised.
36 */
37 
38 #include <petscdmda.h>
39 #include <petscts.h>
40 #include "adolc-utils/drivers.cxx"
41 #include <adolc/adolc.h>
42 
/* (Passive) field for the two variables: one (u,v) pair per grid point */
typedef struct {
  PetscScalar u,v;
} Field;
47 
/* Active field for the two variables: ADOL-C adouble counterpart of Field, used while taping */
typedef struct {
  adouble u,v;
} AField;
52 
/* Application context passed to every TS callback */
typedef struct {
  PetscReal D1,D2,gamma,kappa; /* diffusion coefficients and reaction parameters */
  AField    **u_a,**f_a;       /* active field workspaces (not assigned in this file's visible code) */
  PetscBool aijpc;             /* use a separate AIJ preconditioner matrix? (-aijpc) */
  AdolcCtx  *adctx;            /* Automatic differentiation support */
} AppCtx;
60 
/* Initial conditions for the forward solve and for the adjoint variable */
extern PetscErrorCode InitialConditions(DM da,Vec U);
extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
/* Residual evaluations: passive (plain scalar) and active (ADOL-C annotated) variants */
extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
extern PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
extern PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
/* Jacobian evaluations: hand-coded and ADOL-C generated */
extern PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
extern PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
extern PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
extern PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
71 
72 int main(int argc,char **argv)
73 {
74   TS             ts;
75   Vec            x,r,xdot;
76   DM             da;
77   AppCtx         appctx;
78   AdolcCtx       *adctx;
79   Vec            lambda[1];
80   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_FALSE,byhand=PETSC_FALSE;
81   PetscInt       gxm,gym,i,dofs = 2,ctrl[3] = {0,0,0};
82   PetscScalar    **Seed = NULL,**Rec = NULL,*u_vec;
83   unsigned int   **JP = NULL;
84   ISColoring     iscoloring;
85 
86   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
87   PetscCall(PetscNew(&adctx));
88   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
89   PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
90   appctx.aijpc = PETSC_FALSE,adctx->no_an = PETSC_FALSE,adctx->sparse = PETSC_FALSE,adctx->sparse_view = PETSC_FALSE;adctx->sparse_view_done = PETSC_FALSE;
91   PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
92   PetscCall(PetscOptionsGetBool(NULL,NULL,"-no_annotation",&adctx->no_an,NULL));
93   PetscCall(PetscOptionsGetBool(NULL,NULL,"-jacobian_by_hand",&byhand,NULL));
94   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse",&adctx->sparse,NULL));
95   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse_view",&adctx->sparse_view,NULL));
96   appctx.D1    = 8.0e-5;
97   appctx.D2    = 4.0e-5;
98   appctx.gamma = .024;
99   appctx.kappa = .06;
100   appctx.adctx = adctx;
101 
102   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
103      Create distributed array (DMDA) to manage parallel grid and vectors
104   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
105   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
106   PetscCall(DMSetFromOptions(da));
107   PetscCall(DMSetUp(da));
108   PetscCall(DMDASetFieldName(da,0,"u"));
109   PetscCall(DMDASetFieldName(da,1,"v"));
110 
111   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
112      Extract global vectors from DMDA; then duplicate for remaining
113      vectors that are the same types
114    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
115   PetscCall(DMCreateGlobalVector(da,&x));
116   PetscCall(VecDuplicate(x,&r));
117   PetscCall(VecDuplicate(x,&xdot));
118 
119   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120      Create timestepping solver context
121      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
122   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
123   PetscCall(TSSetType(ts,TSCN));
124   PetscCall(TSSetDM(ts,da));
125   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
126   if (!implicitform) {
127     PetscCall(TSSetRHSFunction(ts,NULL,RHSFunctionPassive,&appctx));
128   } else {
129     PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
130   }
131 
132   if (!adctx->no_an) {
133     PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
134     adctx->m = dofs*gxm*gym;adctx->n = dofs*gxm*gym; /* Number of dependent and independent variables */
135 
136     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137        Trace function(s) just once
138      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
139     if (!implicitform) {
140       PetscCall(RHSFunctionActive(ts,1.0,x,r,&appctx));
141     } else {
142       PetscCall(IFunctionActive(ts,1.0,x,xdot,r,&appctx));
143     }
144 
145     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146       In the case where ADOL-C generates the Jacobian in compressed format,
147       seed and recovery matrices are required. Since the sparsity structure
148       of the Jacobian does not change over the course of the time
149       integration, we can save computational effort by only generating
150       these objects once.
151        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
152     if (adctx->sparse) {
153       /*
154          Generate sparsity pattern
155 
156          One way to do this is to use the Jacobian pattern driver `jac_pat`
157          provided by ColPack. Otherwise, it is the responsibility of the user
158          to obtain the sparsity pattern.
159       */
160       PetscCall(PetscMalloc1(adctx->n,&u_vec));
161       JP = (unsigned int **) malloc(adctx->m*sizeof(unsigned int*));
162       jac_pat(1,adctx->m,adctx->n,u_vec,JP,ctrl);
163       if (adctx->sparse_view) {
164         PetscCall(PrintSparsity(MPI_COMM_WORLD,adctx->m,JP));
165       }
166 
167       /*
168         Extract a column colouring
169 
170         For problems using DMDA, colourings can be extracted directly, as
171         follows. There are also ColPack drivers available. Otherwise, it is the
172         responsibility of the user to obtain a suitable colouring.
173       */
174       PetscCall(DMCreateColoring(da,IS_COLORING_LOCAL,&iscoloring));
175       PetscCall(ISColoringGetIS(iscoloring,PETSC_USE_POINTER,&adctx->p,NULL));
176 
177       /* Generate seed matrix to propagate through the forward mode of AD */
178       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
179       PetscCall(GenerateSeedMatrix(iscoloring,Seed));
180       PetscCall(ISColoringDestroy(&iscoloring));
181 
182       /*
183         Generate recovery matrix, which is used to recover the Jacobian from
184         compressed format */
185       PetscCall(AdolcMalloc2(adctx->m,adctx->p,&Rec));
186       PetscCall(GetRecoveryMatrix(Seed,JP,adctx->m,adctx->p,Rec));
187 
188       /* Store results and free workspace */
189       adctx->Rec = Rec;
190       for (i=0;i<adctx->m;i++)
191         free(JP[i]);
192       free(JP);
193       PetscCall(PetscFree(u_vec));
194 
195     } else {
196 
197       /*
198         In 'full' Jacobian mode, propagate an identity matrix through the
199         forward mode of AD.
200       */
201       adctx->p = adctx->n;
202       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
203       PetscCall(Identity(adctx->n,Seed));
204     }
205     adctx->Seed = Seed;
206   }
207 
208   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209      Set Jacobian
210    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
211   if (!implicitform) {
212     if (!byhand) {
213       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianAdolc,&appctx));
214     } else {
215       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianByHand,&appctx));
216     }
217   } else {
218     if (appctx.aijpc) {
219       Mat A,B;
220 
221       PetscCall(DMSetMatType(da,MATSELL));
222       PetscCall(DMCreateMatrix(da,&A));
223       PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
224       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
225       if (!byhand) {
226         PetscCall(TSSetIJacobian(ts,A,B,IJacobianAdolc,&appctx));
227       } else {
228         PetscCall(TSSetIJacobian(ts,A,B,IJacobianByHand,&appctx));
229       }
230       PetscCall(MatDestroy(&A));
231       PetscCall(MatDestroy(&B));
232     } else {
233       if (!byhand) {
234         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianAdolc,&appctx));
235       } else {
236         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianByHand,&appctx));
237       }
238     }
239   }
240 
241   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
242      Set initial conditions
243    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
244   PetscCall(InitialConditions(da,x));
245   PetscCall(TSSetSolution(ts,x));
246 
247   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
248     Have the TS save its trajectory so that TSAdjointSolve() may be used
249     and set solver options
250    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
251   if (!forwardonly) {
252     PetscCall(TSSetSaveTrajectory(ts));
253     PetscCall(TSSetMaxTime(ts,200.0));
254     PetscCall(TSSetTimeStep(ts,0.5));
255   } else {
256     PetscCall(TSSetMaxTime(ts,2000.0));
257     PetscCall(TSSetTimeStep(ts,10));
258   }
259   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
260   PetscCall(TSSetFromOptions(ts));
261 
262   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
263      Solve ODE system
264      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
265   PetscCall(TSSolve(ts,x));
266   if (!forwardonly) {
267     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
268        Start the Adjoint model
269        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
270     PetscCall(VecDuplicate(x,&lambda[0]));
271     /*   Reset initial conditions for the adjoint integration */
272     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
273     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
274     PetscCall(TSAdjointSolve(ts));
275     PetscCall(VecDestroy(&lambda[0]));
276   }
277 
278   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
279      Free work space.
280    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
281   PetscCall(VecDestroy(&xdot));
282   PetscCall(VecDestroy(&r));
283   PetscCall(VecDestroy(&x));
284   PetscCall(TSDestroy(&ts));
285   if (!adctx->no_an) {
286     if (adctx->sparse) PetscCall(AdolcFree2(Rec));
287     PetscCall(AdolcFree2(Seed));
288   }
289   PetscCall(DMDestroy(&da));
290   PetscCall(PetscFree(adctx));
291   PetscCall(PetscFinalize());
292   return 0;
293 }
294 
295 PetscErrorCode InitialConditions(DM da,Vec U)
296 {
297   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
298   Field          **u;
299   PetscReal      hx,hy,x,y;
300 
301   PetscFunctionBegin;
302   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
303 
304   hx = 2.5/(PetscReal)Mx;
305   hy = 2.5/(PetscReal)My;
306 
307   /*
308      Get pointers to vector data
309   */
310   PetscCall(DMDAVecGetArray(da,U,&u));
311 
312   /*
313      Get local grid boundaries
314   */
315   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
316 
317   /*
318      Compute function over the locally owned part of the grid
319   */
320   for (j=ys; j<ys+ym; j++) {
321     y = j*hy;
322     for (i=xs; i<xs+xm; i++) {
323       x = i*hx;
324       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
325       else u[j][i].v = 0.0;
326 
327       u[j][i].u = 1.0 - 2.0*u[j][i].v;
328     }
329   }
330 
331   /*
332      Restore vectors
333   */
334   PetscCall(DMDAVecRestoreArray(da,U,&u));
335   PetscFunctionReturn(0);
336 }
337 
338 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
339 {
340    PetscInt i,j,Mx,My,xs,ys,xm,ym;
341    Field **l;
342    PetscFunctionBegin;
343 
344    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
345    /* locate the global i index for x and j index for y */
346    i = (PetscInt)(x*(Mx-1));
347    j = (PetscInt)(y*(My-1));
348    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
349 
350    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
351      /* the i,j vertex is on this process */
352      PetscCall(DMDAVecGetArray(da,lambda,&l));
353      l[j][i].u = 1.0;
354      l[j][i].v = 1.0;
355      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
356    }
357    PetscFunctionReturn(0);
358 }
359 
360 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
361 {
362   AppCtx         *appctx = (AppCtx*)ptr;
363   PetscInt       i,j,xs,ys,xm,ym;
364   PetscReal      hx,hy,sx,sy;
365   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
366 
367   PetscFunctionBegin;
368   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
369   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
370 
371   /* Get local grid boundaries */
372   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
373 
374   /* Compute function over the locally owned part of the grid */
375   for (j=ys; j<ys+ym; j++) {
376     for (i=xs; i<xs+xm; i++) {
377       uc        = u[j][i].u;
378       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
379       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
380       vc        = u[j][i].v;
381       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
382       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
383       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
384       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
385     }
386   }
387   PetscCall(PetscLogFlops(16.0*xm*ym));
388   PetscFunctionReturn(0);
389 }
390 
391 PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
392 {
393   AppCtx         *appctx = (AppCtx*)ptr;
394   DM             da;
395   DMDALocalInfo  info;
396   Field          **u,**f,**udot;
397   Vec            localU;
398   PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
399   PetscReal      hx,hy,sx,sy;
400   adouble        uc,uxx,uyy,vc,vxx,vyy;
401   AField         **f_a,*f_c,**u_a,*u_c;
402   PetscScalar    dummy;
403 
404   PetscFunctionBegin;
405 
406   PetscCall(TSGetDM(ts,&da));
407   PetscCall(DMDAGetLocalInfo(da,&info));
408   PetscCall(DMGetLocalVector(da,&localU));
409   hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
410   hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
411   xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
412   ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;
413 
414   /*
415      Scatter ghost points to local vector,using the 2-step process
416         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
417      By placing code between these two statements, computations can be
418      done while messages are in transition.
419   */
420   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
421   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
422 
423   /*
424      Get pointers to vector data
425   */
426   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
427   PetscCall(DMDAVecGetArray(da,F,&f));
428   PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));
429 
430   /*
431     Create contiguous 1-arrays of AFields
432 
433     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
434           cannot be allocated using PetscMalloc, as this does not call the
435           relevant class constructor. Instead, we use the C++ keyword `new`.
436   */
437   u_c = new AField[info.gxm*info.gym];
438   f_c = new AField[info.gxm*info.gym];
439 
440   /* Create corresponding 2-arrays of AFields */
441   u_a = new AField*[info.gym];
442   f_a = new AField*[info.gym];
443 
444   /* Align indices between array types to endow 2d array with ghost points */
445   PetscCall(GiveGhostPoints(da,u_c,&u_a));
446   PetscCall(GiveGhostPoints(da,f_c,&f_a));
447 
448   trace_on(1);  /* Start of active section on tape 1 */
449 
450   /*
451     Mark independence
452 
453     NOTE: Ghost points are marked as independent, in place of the points they represent on
454           other processors / on other boundaries.
455   */
456   for (j=gys; j<gys+gym; j++) {
457     for (i=gxs; i<gxs+gxm; i++) {
458       u_a[j][i].u <<= u[j][i].u;
459       u_a[j][i].v <<= u[j][i].v;
460     }
461   }
462 
463   /* Compute function over the locally owned part of the grid */
464   for (j=ys; j<ys+ym; j++) {
465     for (i=xs; i<xs+xm; i++) {
466       uc        = u_a[j][i].u;
467       uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
468       uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
469       vc        = u_a[j][i].v;
470       vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
471       vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
472       f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
473       f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
474     }
475   }
476 
477   /*
478     Mark dependence
479 
480     NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
481           the Jacobian later.
482   */
483   for (j=gys; j<gys+gym; j++) {
484     for (i=gxs; i<gxs+gxm; i++) {
485       if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
486         f_a[j][i].u >>= dummy;
487         f_a[j][i].v >>= dummy;
488       } else {
489         f_a[j][i].u >>= f[j][i].u;
490         f_a[j][i].v >>= f[j][i].v;
491       }
492     }
493   }
494   trace_off();  /* End of active section */
495   PetscCall(PetscLogFlops(16.0*xm*ym));
496 
497   /* Restore vectors */
498   PetscCall(DMDAVecRestoreArray(da,F,&f));
499   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
500   PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));
501 
502   PetscCall(DMRestoreLocalVector(da,&localU));
503 
504   /* Destroy AFields appropriately */
505   f_a += info.gys;
506   u_a += info.gys;
507   delete[] f_a;
508   delete[] u_a;
509   delete[] f_c;
510   delete[] u_c;
511 
512   PetscFunctionReturn(0);
513 }
514 
515 PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
516 {
517   AppCtx         *appctx = (AppCtx*)ptr;
518   DM             da;
519   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
520   PetscReal      hx,hy,sx,sy;
521   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
522   Field          **u,**f;
523   Vec            localU,localF;
524 
525   PetscFunctionBegin;
526   PetscCall(TSGetDM(ts,&da));
527   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
528   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
529   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
530   PetscCall(DMGetLocalVector(da,&localU));
531   PetscCall(DMGetLocalVector(da,&localF));
532 
533   /*
534      Scatter ghost points to local vector,using the 2-step process
535         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
536      By placing code between these two statements, computations can be
537      done while messages are in transition.
538   */
539   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
540   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
541   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
542   PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
543   PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));
544 
545   /*
546      Get pointers to vector data
547   */
548   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
549   PetscCall(DMDAVecGetArray(da,localF,&f));
550 
551   /*
552      Get local grid boundaries
553   */
554   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
555 
556   /*
557      Compute function over the locally owned part of the grid
558   */
559   for (j=ys; j<ys+ym; j++) {
560     for (i=xs; i<xs+xm; i++) {
561       uc        = u[j][i].u;
562       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
563       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
564       vc        = u[j][i].v;
565       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
566       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
567       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
568       f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
569     }
570   }
571 
572   /*
573      Gather global vector, using the 2-step process
574         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
575 
576      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
577                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
578   */
579   PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
580   PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));
581 
582   /*
583      Restore vectors
584   */
585   PetscCall(DMDAVecRestoreArray(da,localF,&f));
586   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
587   PetscCall(DMRestoreLocalVector(da,&localF));
588   PetscCall(DMRestoreLocalVector(da,&localU));
589   PetscCall(PetscLogFlops(16.0*xm*ym));
590   PetscFunctionReturn(0);
591 }
592 
593 PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
594 {
595   AppCtx         *appctx = (AppCtx*)ptr;
596   DM             da;
597   PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym,Mx,My;
598   PetscReal      hx,hy,sx,sy;
599   AField         **f_a,*f_c,**u_a,*u_c;
600   adouble        uc,uxx,uyy,vc,vxx,vyy;
601   Field          **u,**f;
602   Vec            localU,localF;
603 
604   PetscFunctionBegin;
605   PetscCall(TSGetDM(ts,&da));
606   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
607   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
608   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
609   PetscCall(DMGetLocalVector(da,&localU));
610   PetscCall(DMGetLocalVector(da,&localF));
611 
612   /*
613      Scatter ghost points to local vector,using the 2-step process
614         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
615      By placing code between these two statements, computations can be
616      done while messages are in transition.
617   */
618   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
619   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
620   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
621   PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
622   PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));
623 
624   /*
625      Get pointers to vector data
626   */
627   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
628   PetscCall(DMDAVecGetArray(da,localF,&f));
629 
630   /*
631      Get local and ghosted grid boundaries
632   */
633   PetscCall(DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL));
634   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
635 
636   /*
637     Create contiguous 1-arrays of AFields
638 
639     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
640           cannot be allocated using PetscMalloc, as this does not call the
641           relevant class constructor. Instead, we use the C++ keyword `new`.
642   */
643   u_c = new AField[gxm*gym];
644   f_c = new AField[gxm*gym];
645 
646   /* Create corresponding 2-arrays of AFields */
647   u_a = new AField*[gym];
648   f_a = new AField*[gym];
649 
650   /* Align indices between array types to endow 2d array with ghost points */
651   PetscCall(GiveGhostPoints(da,u_c,&u_a));
652   PetscCall(GiveGhostPoints(da,f_c,&f_a));
653 
654   /*
655      Compute function over the locally owned part of the grid
656   */
657   trace_on(1);  // ----------------------------------------------- Start of active section
658 
659   /*
660     Mark independence
661 
662     NOTE: Ghost points are marked as independent, in place of the points they represent on
663           other processors / on other boundaries.
664   */
665   for (j=gys; j<gys+gym; j++) {
666     for (i=gxs; i<gxs+gxm; i++) {
667       u_a[j][i].u <<= u[j][i].u;
668       u_a[j][i].v <<= u[j][i].v;
669     }
670   }
671 
672   /*
673     Compute function over the locally owned part of the grid
674   */
675   for (j=ys; j<ys+ym; j++) {
676     for (i=xs; i<xs+xm; i++) {
677       uc          = u_a[j][i].u;
678       uxx         = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
679       uyy         = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
680       vc          = u_a[j][i].v;
681       vxx         = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
682       vyy         = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
683       f_a[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
684       f_a[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
685     }
686   }
687   /*
688     Mark dependence
689 
690     NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
691           during Jacobian assembly.
692   */
693   for (j=gys; j<gys+gym; j++) {
694     for (i=gxs; i<gxs+gxm; i++) {
695       f_a[j][i].u >>= f[j][i].u;
696       f_a[j][i].v >>= f[j][i].v;
697     }
698   }
699   trace_off();  // ----------------------------------------------- End of active section
700 
701   /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
702 //  if (appctx->adctx->zos) {
703 //    PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
704 //  }
705 
706   /*
707      Gather global vector, using the 2-step process
708         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
709 
710      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
711                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
712   */
713   PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
714   PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));
715 
716   /*
717      Restore vectors
718   */
719   PetscCall(DMDAVecRestoreArray(da,localF,&f));
720   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
721   PetscCall(DMRestoreLocalVector(da,&localF));
722   PetscCall(DMRestoreLocalVector(da,&localU));
723   PetscCall(PetscLogFlops(16.0*xm*ym));
724 
725   /* Destroy AFields appropriately */
726   f_a += gys;
727   u_a += gys;
728   delete[] f_a;
729   delete[] u_a;
730   delete[] f_c;
731   delete[] u_c;
732 
733   PetscFunctionReturn(0);
734 }
735 
736 PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
737 {
738   AppCtx            *appctx = (AppCtx*)ctx;
739   DM                da;
740   const PetscScalar *u_vec;
741   Vec               localU;
742 
743   PetscFunctionBegin;
744   PetscCall(TSGetDM(ts,&da));
745   PetscCall(DMGetLocalVector(da,&localU));
746 
747   /*
748      Scatter ghost points to local vector,using the 2-step process
749         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
750      By placing code between these two statements, computations can be
751      done while messages are in transition.
752   */
753   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
754   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
755 
756   /* Get pointers to vector data */
757   PetscCall(VecGetArrayRead(localU,&u_vec));
758 
759   /*
760     Compute Jacobian
761   */
762   PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1,A,u_vec,a,appctx->adctx));
763 
764   /*
765      Restore vectors
766   */
767   PetscCall(VecRestoreArrayRead(localU,&u_vec));
768   PetscCall(DMRestoreLocalVector(da,&localU));
769   PetscFunctionReturn(0);
770 }
771 
772 PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
773 {
774   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
775   DM             da;
776   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
777   PetscReal      hx,hy,sx,sy;
778   PetscScalar    uc,vc;
779   Field          **u;
780   Vec            localU;
781   MatStencil     stencil[6],rowstencil;
782   PetscScalar    entries[6];
783 
784   PetscFunctionBegin;
785   PetscCall(TSGetDM(ts,&da));
786   PetscCall(DMGetLocalVector(da,&localU));
787   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
788 
789   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
790   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
791 
792   /*
793      Scatter ghost points to local vector,using the 2-step process
794         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
795      By placing code between these two statements, computations can be
796      done while messages are in transition.
797   */
798   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
799   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
800 
801   /*
802      Get pointers to vector data
803   */
804   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
805 
806   /*
807      Get local grid boundaries
808   */
809   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
810 
811   stencil[0].k = 0;
812   stencil[1].k = 0;
813   stencil[2].k = 0;
814   stencil[3].k = 0;
815   stencil[4].k = 0;
816   stencil[5].k = 0;
817   rowstencil.k = 0;
818   /*
819      Compute function over the locally owned part of the grid
820   */
821   for (j=ys; j<ys+ym; j++) {
822 
823     stencil[0].j = j-1;
824     stencil[1].j = j+1;
825     stencil[2].j = j;
826     stencil[3].j = j;
827     stencil[4].j = j;
828     stencil[5].j = j;
829     rowstencil.k = 0; rowstencil.j = j;
830     for (i=xs; i<xs+xm; i++) {
831       uc = u[j][i].u;
832       vc = u[j][i].v;
833 
834       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
835       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
836 
837       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
838       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
839        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
840 
841       stencil[0].i = i; stencil[0].c = 0; entries[0] = -appctx->D1*sy;
842       stencil[1].i = i; stencil[1].c = 0; entries[1] = -appctx->D1*sy;
843       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = -appctx->D1*sx;
844       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = -appctx->D1*sx;
845       stencil[4].i = i; stencil[4].c = 0; entries[4] = 2.0*appctx->D1*(sx + sy) + vc*vc + appctx->gamma + a;
846       stencil[5].i = i; stencil[5].c = 1; entries[5] = 2.0*uc*vc;
847       rowstencil.i = i; rowstencil.c = 0;
848 
849       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
850       if (appctx->aijpc) {
851         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
852       }
853       stencil[0].c = 1; entries[0] = -appctx->D2*sy;
854       stencil[1].c = 1; entries[1] = -appctx->D2*sy;
855       stencil[2].c = 1; entries[2] = -appctx->D2*sx;
856       stencil[3].c = 1; entries[3] = -appctx->D2*sx;
857       stencil[4].c = 1; entries[4] = 2.0*appctx->D2*(sx + sy) - 2.0*uc*vc + appctx->gamma + appctx->kappa + a;
858       stencil[5].c = 0; entries[5] = -vc*vc;
859 
860       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
861       if (appctx->aijpc) {
862         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
863       }
864       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
865     }
866   }
867 
868   /*
869      Restore vectors
870   */
871   PetscCall(PetscLogFlops(19.0*xm*ym));
872   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
873   PetscCall(DMRestoreLocalVector(da,&localU));
874   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
875   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
876   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
877   if (appctx->aijpc) {
878     PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
879     PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
880     PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
881   }
882   PetscFunctionReturn(0);
883 }
884 
/*
  Hand-coded Jacobian dRHS/dU of the reaction-diffusion right-hand side used in the
  explicit (RHSFunction/RHSJacobian) formulation; the residual being differentiated
  appears in the comments inside the loop.  Each grid point contributes two rows
  (components u and v), each with six entries: the 5-point diffusion stencil for its
  own component plus one cross-coupling entry for the other component.

  Input Parameters:
    ts  - timestepping context
    t   - current time (unused: the system is autonomous)
    U   - global solution vector
    ctx - user-defined application context (AppCtx)

  Output Parameters:
    A - Jacobian matrix
    B - preconditioning matrix
*/
PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
{
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,vc;
  Field          **u;
  Vec            localU;
  MatStencil     stencil[6],rowstencil;
  PetscScalar    entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  /* Mesh spacings on the 2.5 x 2.5 periodic domain and the corresponding
     second-difference scalings 1/h^2 */
  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* 2D problem: the third (k) index of every stencil entry is always zero */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {

    /* Columns of the 5-point stencil (south, north, then row-local x-neighbours) */
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      /* Row for the u equation: entries[0..4] are d(f_u)/du over the 5-point
         stencil, entries[5] = d(f_u)/dv = -2*u*v at the same point */
      stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i; rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));

      /* Row for the v equation: entries[0..4] are d(f_v)/dv over the 5-point
         stencil, entries[5] = d(f_v)/du = v^2 at the same point */
      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0*xm*ym));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  /* NOTE(review): unlike IJacobianByHand, the loop above never inserts values into B,
     yet B is assembled here when -aijpc is set -- confirm whether -aijpc is intended
     to be combined with the explicit (RHS) formulation. */
  if (appctx->aijpc) {
    PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}
994 
995 PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
996 {
997   AppCtx         *appctx = (AppCtx*)ctx;
998   DM             da;
999   PetscScalar    *u_vec;
1000   Vec            localU;
1001 
1002   PetscFunctionBegin;
1003   PetscCall(TSGetDM(ts,&da));
1004   PetscCall(DMGetLocalVector(da,&localU));
1005 
1006   /*
1007      Scatter ghost points to local vector,using the 2-step process
1008         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
1009      By placing code between these two statements, computations can be
1010      done while messages are in transition.
1011   */
1012   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
1013   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
1014 
1015   /* Get pointers to vector data */
1016   PetscCall(VecGetArray(localU,&u_vec));
1017 
1018   /*
1019     Compute Jacobian
1020   */
1021   PetscCall(PetscAdolcComputeRHSJacobianLocal(1,A,u_vec,appctx->adctx));
1022 
1023   /*
1024      Restore vectors
1025   */
1026   PetscCall(VecRestoreArray(localU,&u_vec));
1027   PetscCall(DMRestoreLocalVector(da,&localU));
1028   PetscFunctionReturn(0);
1029 }
1030 
1031 /*TEST
1032 
1033   build:
1034     requires: double !complex adolc colpack
1035 
1036   test:
1037     suffix: 1
1038     nsize: 1
1039     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1040     output_file: output/adr_ex5adj_1.out
1041 
1042   test:
1043     suffix: 2
1044     nsize: 1
1045     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1046     output_file: output/adr_ex5adj_2.out
1047 
1048   test:
1049     suffix: 3
1050     nsize: 4
1051     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1052     output_file: output/adr_ex5adj_3.out
1053 
1054   test:
1055     suffix: 4
1056     nsize: 4
1057     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1058     output_file: output/adr_ex5adj_4.out
1059 
1060   testset:
1061     suffix: 5
1062     nsize: 4
1063     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1064     output_file: output/adr_ex5adj_5.out
1065 
1066   testset:
1067     suffix: 6
1068     nsize: 4
1069     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1070     output_file: output/adr_ex5adj_6.out
1071 
1072 TEST*/
1073