xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj.cxx (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4    Concepts: TS^time-dependent nonlinear problems
5    Concepts: Automatic differentiation using ADOL-C
6 */
7 /*
8    REQUIRES configuration of PETSc with option --download-adolc.
9 
10    For documentation on ADOL-C, see
11      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
12 
13    REQUIRES configuration of PETSc with option --download-colpack
14 
15    For documentation on ColPack, see
16      $PETSC_ARCH/externalpackages/git.colpack/README.md
17 */
18 /* ------------------------------------------------------------------------
19   See ../advection-diffusion-reaction/ex5 for a description of the problem
20   ------------------------------------------------------------------------- */
21 
22 /*
23   Runtime options:
24 
25     Solver:
26       -forwardonly       - Run the forward simulation without adjoint.
27       -implicitform      - Provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used.
28       -aijpc             - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
29 
30     Jacobian generation:
31       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
32       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
33       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
34       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
35  */
36 /*
37   NOTE: If -adolc_sparse option is used, at least four processors should be used, so that no processor owns boundaries which are
38         identified by the periodic boundary conditions. The number of grid points in both x- and y-directions should be multiples
39         of 5, in order for the 5-point stencil to be cleanly parallelised.
40 */
41 
42 #include <petscdmda.h>
43 #include <petscts.h>
44 #include "adolc-utils/drivers.cxx"
45 #include <adolc/adolc.h>
46 
/* (Passive) field holding the two unknowns (u,v) at one grid point */
typedef struct {
  PetscScalar u,v;
} Field;

/* Active (ADOL-C traced) counterpart of Field, used while taping the residual */
typedef struct {
  adouble u,v;
} AField;

/* Application context passed to all callbacks */
typedef struct {
  PetscReal D1,D2,gamma,kappa; /* diffusion coefficients and reaction parameters */
  AField    **u_a,**f_a;       /* active state / residual arrays */
  PetscBool aijpc;             /* set with -aijpc: use an AIJ preconditioner matrix */
  AdolcCtx  *adctx;            /* Automatic differentiation support */
} AppCtx;
64 
65 extern PetscErrorCode InitialConditions(DM da,Vec U);
66 extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
67 extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
68 extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
69 extern PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
70 extern PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
71 extern PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
72 extern PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
73 extern PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
74 extern PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
75 
76 int main(int argc,char **argv)
77 {
78   TS             ts;
79   Vec            x,r,xdot;
80   DM             da;
81   AppCtx         appctx;
82   AdolcCtx       *adctx;
83   Vec            lambda[1];
84   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_FALSE,byhand=PETSC_FALSE;
85   PetscInt       gxm,gym,i,dofs = 2,ctrl[3] = {0,0,0};
86   PetscScalar    **Seed = NULL,**Rec = NULL,*u_vec;
87   unsigned int   **JP = NULL;
88   ISColoring     iscoloring;
89 
90   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
91   PetscCall(PetscNew(&adctx));
92   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
93   PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
94   appctx.aijpc = PETSC_FALSE,adctx->no_an = PETSC_FALSE,adctx->sparse = PETSC_FALSE,adctx->sparse_view = PETSC_FALSE;adctx->sparse_view_done = PETSC_FALSE;
95   PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
96   PetscCall(PetscOptionsGetBool(NULL,NULL,"-no_annotation",&adctx->no_an,NULL));
97   PetscCall(PetscOptionsGetBool(NULL,NULL,"-jacobian_by_hand",&byhand,NULL));
98   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse",&adctx->sparse,NULL));
99   PetscCall(PetscOptionsGetBool(NULL,NULL,"-adolc_sparse_view",&adctx->sparse_view,NULL));
100   appctx.D1    = 8.0e-5;
101   appctx.D2    = 4.0e-5;
102   appctx.gamma = .024;
103   appctx.kappa = .06;
104   appctx.adctx = adctx;
105 
106   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
107      Create distributed array (DMDA) to manage parallel grid and vectors
108   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
109   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
110   PetscCall(DMSetFromOptions(da));
111   PetscCall(DMSetUp(da));
112   PetscCall(DMDASetFieldName(da,0,"u"));
113   PetscCall(DMDASetFieldName(da,1,"v"));
114 
115   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
116      Extract global vectors from DMDA; then duplicate for remaining
117      vectors that are the same types
118    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
119   PetscCall(DMCreateGlobalVector(da,&x));
120   PetscCall(VecDuplicate(x,&r));
121   PetscCall(VecDuplicate(x,&xdot));
122 
123   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
124      Create timestepping solver context
125      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
126   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
127   PetscCall(TSSetType(ts,TSCN));
128   PetscCall(TSSetDM(ts,da));
129   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
130   if (!implicitform) {
131     PetscCall(TSSetRHSFunction(ts,NULL,RHSFunctionPassive,&appctx));
132   } else {
133     PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
134   }
135 
136   if (!adctx->no_an) {
137     PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
138     adctx->m = dofs*gxm*gym;adctx->n = dofs*gxm*gym; /* Number of dependent and independent variables */
139 
140     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
141        Trace function(s) just once
142      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
143     if (!implicitform) {
144       PetscCall(RHSFunctionActive(ts,1.0,x,r,&appctx));
145     } else {
146       PetscCall(IFunctionActive(ts,1.0,x,xdot,r,&appctx));
147     }
148 
149     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
150       In the case where ADOL-C generates the Jacobian in compressed format,
151       seed and recovery matrices are required. Since the sparsity structure
152       of the Jacobian does not change over the course of the time
153       integration, we can save computational effort by only generating
154       these objects once.
155        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
156     if (adctx->sparse) {
157       /*
158          Generate sparsity pattern
159 
160          One way to do this is to use the Jacobian pattern driver `jac_pat`
161          provided by ColPack. Otherwise, it is the responsibility of the user
162          to obtain the sparsity pattern.
163       */
164       PetscCall(PetscMalloc1(adctx->n,&u_vec));
165       JP = (unsigned int **) malloc(adctx->m*sizeof(unsigned int*));
166       jac_pat(1,adctx->m,adctx->n,u_vec,JP,ctrl);
167       if (adctx->sparse_view) {
168         PetscCall(PrintSparsity(MPI_COMM_WORLD,adctx->m,JP));
169       }
170 
171       /*
172         Extract a column colouring
173 
174         For problems using DMDA, colourings can be extracted directly, as
175         follows. There are also ColPack drivers available. Otherwise, it is the
176         responsibility of the user to obtain a suitable colouring.
177       */
178       PetscCall(DMCreateColoring(da,IS_COLORING_LOCAL,&iscoloring));
179       PetscCall(ISColoringGetIS(iscoloring,PETSC_USE_POINTER,&adctx->p,NULL));
180 
181       /* Generate seed matrix to propagate through the forward mode of AD */
182       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
183       PetscCall(GenerateSeedMatrix(iscoloring,Seed));
184       PetscCall(ISColoringDestroy(&iscoloring));
185 
186       /*
187         Generate recovery matrix, which is used to recover the Jacobian from
188         compressed format */
189       PetscCall(AdolcMalloc2(adctx->m,adctx->p,&Rec));
190       PetscCall(GetRecoveryMatrix(Seed,JP,adctx->m,adctx->p,Rec));
191 
192       /* Store results and free workspace */
193       adctx->Rec = Rec;
194       for (i=0;i<adctx->m;i++)
195         free(JP[i]);
196       free(JP);
197       PetscCall(PetscFree(u_vec));
198 
199     } else {
200 
201       /*
202         In 'full' Jacobian mode, propagate an identity matrix through the
203         forward mode of AD.
204       */
205       adctx->p = adctx->n;
206       PetscCall(AdolcMalloc2(adctx->n,adctx->p,&Seed));
207       PetscCall(Identity(adctx->n,Seed));
208     }
209     adctx->Seed = Seed;
210   }
211 
212   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
213      Set Jacobian
214    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
215   if (!implicitform) {
216     if (!byhand) {
217       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianAdolc,&appctx));
218     } else {
219       PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianByHand,&appctx));
220     }
221   } else {
222     if (appctx.aijpc) {
223       Mat A,B;
224 
225       PetscCall(DMSetMatType(da,MATSELL));
226       PetscCall(DMCreateMatrix(da,&A));
227       PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
228       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
229       if (!byhand) {
230         PetscCall(TSSetIJacobian(ts,A,B,IJacobianAdolc,&appctx));
231       } else {
232         PetscCall(TSSetIJacobian(ts,A,B,IJacobianByHand,&appctx));
233       }
234       PetscCall(MatDestroy(&A));
235       PetscCall(MatDestroy(&B));
236     } else {
237       if (!byhand) {
238         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianAdolc,&appctx));
239       } else {
240         PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobianByHand,&appctx));
241       }
242     }
243   }
244 
245   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
246      Set initial conditions
247    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
248   PetscCall(InitialConditions(da,x));
249   PetscCall(TSSetSolution(ts,x));
250 
251   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
252     Have the TS save its trajectory so that TSAdjointSolve() may be used
253     and set solver options
254    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
255   if (!forwardonly) {
256     PetscCall(TSSetSaveTrajectory(ts));
257     PetscCall(TSSetMaxTime(ts,200.0));
258     PetscCall(TSSetTimeStep(ts,0.5));
259   } else {
260     PetscCall(TSSetMaxTime(ts,2000.0));
261     PetscCall(TSSetTimeStep(ts,10));
262   }
263   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
264   PetscCall(TSSetFromOptions(ts));
265 
266   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
267      Solve ODE system
268      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
269   PetscCall(TSSolve(ts,x));
270   if (!forwardonly) {
271     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
272        Start the Adjoint model
273        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
274     PetscCall(VecDuplicate(x,&lambda[0]));
275     /*   Reset initial conditions for the adjoint integration */
276     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
277     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
278     PetscCall(TSAdjointSolve(ts));
279     PetscCall(VecDestroy(&lambda[0]));
280   }
281 
282   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
283      Free work space.
284    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
285   PetscCall(VecDestroy(&xdot));
286   PetscCall(VecDestroy(&r));
287   PetscCall(VecDestroy(&x));
288   PetscCall(TSDestroy(&ts));
289   if (!adctx->no_an) {
290     if (adctx->sparse) PetscCall(AdolcFree2(Rec));
291     PetscCall(AdolcFree2(Seed));
292   }
293   PetscCall(DMDestroy(&da));
294   PetscCall(PetscFree(adctx));
295   PetscCall(PetscFinalize());
296   return 0;
297 }
298 
299 PetscErrorCode InitialConditions(DM da,Vec U)
300 {
301   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
302   Field          **u;
303   PetscReal      hx,hy,x,y;
304 
305   PetscFunctionBegin;
306   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
307 
308   hx = 2.5/(PetscReal)Mx;
309   hy = 2.5/(PetscReal)My;
310 
311   /*
312      Get pointers to vector data
313   */
314   PetscCall(DMDAVecGetArray(da,U,&u));
315 
316   /*
317      Get local grid boundaries
318   */
319   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
320 
321   /*
322      Compute function over the locally owned part of the grid
323   */
324   for (j=ys; j<ys+ym; j++) {
325     y = j*hy;
326     for (i=xs; i<xs+xm; i++) {
327       x = i*hx;
328       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
329       else u[j][i].v = 0.0;
330 
331       u[j][i].u = 1.0 - 2.0*u[j][i].v;
332     }
333   }
334 
335   /*
336      Restore vectors
337   */
338   PetscCall(DMDAVecRestoreArray(da,U,&u));
339   PetscFunctionReturn(0);
340 }
341 
342 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
343 {
344    PetscInt i,j,Mx,My,xs,ys,xm,ym;
345    Field **l;
346    PetscFunctionBegin;
347 
348    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
349    /* locate the global i index for x and j index for y */
350    i = (PetscInt)(x*(Mx-1));
351    j = (PetscInt)(y*(My-1));
352    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
353 
354    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
355      /* the i,j vertex is on this process */
356      PetscCall(DMDAVecGetArray(da,lambda,&l));
357      l[j][i].u = 1.0;
358      l[j][i].v = 1.0;
359      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
360    }
361    PetscFunctionReturn(0);
362 }
363 
364 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
365 {
366   AppCtx         *appctx = (AppCtx*)ptr;
367   PetscInt       i,j,xs,ys,xm,ym;
368   PetscReal      hx,hy,sx,sy;
369   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
370 
371   PetscFunctionBegin;
372   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
373   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
374 
375   /* Get local grid boundaries */
376   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
377 
378   /* Compute function over the locally owned part of the grid */
379   for (j=ys; j<ys+ym; j++) {
380     for (i=xs; i<xs+xm; i++) {
381       uc        = u[j][i].u;
382       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
383       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
384       vc        = u[j][i].v;
385       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
386       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
387       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
388       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
389     }
390   }
391   PetscCall(PetscLogFlops(16.0*xm*ym));
392   PetscFunctionReturn(0);
393 }
394 
/*
  Implicit residual evaluation, annotated with ADOL-C so that the whole
  computation is recorded on tape 1. The tape is written once (called from
  main) and later differentiated to assemble the IJacobian.

  Input parameters:
    ts    - timestepping context (provides the DMDA)
    ftime - current time (unused in the residual itself)
    U     - current state
    Udot  - current time derivative (treated passively; only U is active)
  Output parameter:
    F     - residual vector
    ptr   - AppCtx with problem parameters
*/
PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  DMDALocalInfo  info;
  Field          **u,**f,**udot;
  Vec            localU;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      hx,hy,sx,sy;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  AField         **f_a,*f_c,**u_a,*u_c;
  PetscScalar    dummy;

  PetscFunctionBegin;

  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetLocalVector(da,&localU));
  /* Inverse-square grid spacings for the 2.5 x 2.5 domain */
  hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  /* Owned (xs..,ys..) and ghosted (gxs..,gys..) index ranges */
  xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,F,&f));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm*info.gym];
  f_c = new AField[info.gxm*info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[info.gym];
  f_a = new AField*[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  trace_on(1);  /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u_a[j][i].u;
      uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc        = u_a[j][i].v;
      vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later. Ghost rows that this rank does not own are routed to `dummy`
          rather than into F.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off();  /* End of active section */
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));

  PetscCall(DMRestoreLocalVector(da,&localU));

  /* Destroy AFields appropriately: undo the row-index shift applied by
     GiveGhostPoints so delete[] receives the pointers returned by new */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}
518 
519 PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
520 {
521   AppCtx         *appctx = (AppCtx*)ptr;
522   DM             da;
523   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
524   PetscReal      hx,hy,sx,sy;
525   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
526   Field          **u,**f;
527   Vec            localU,localF;
528 
529   PetscFunctionBegin;
530   PetscCall(TSGetDM(ts,&da));
531   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
532   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
533   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
534   PetscCall(DMGetLocalVector(da,&localU));
535   PetscCall(DMGetLocalVector(da,&localF));
536 
537   /*
538      Scatter ghost points to local vector,using the 2-step process
539         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
540      By placing code between these two statements, computations can be
541      done while messages are in transition.
542   */
543   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
544   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
545   PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
546   PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
547   PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));
548 
549   /*
550      Get pointers to vector data
551   */
552   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
553   PetscCall(DMDAVecGetArray(da,localF,&f));
554 
555   /*
556      Get local grid boundaries
557   */
558   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
559 
560   /*
561      Compute function over the locally owned part of the grid
562   */
563   for (j=ys; j<ys+ym; j++) {
564     for (i=xs; i<xs+xm; i++) {
565       uc        = u[j][i].u;
566       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
567       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
568       vc        = u[j][i].v;
569       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
570       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
571       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
572       f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
573     }
574   }
575 
576   /*
577      Gather global vector, using the 2-step process
578         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
579 
580      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
581                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
582   */
583   PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
584   PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));
585 
586   /*
587      Restore vectors
588   */
589   PetscCall(DMDAVecRestoreArray(da,localF,&f));
590   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
591   PetscCall(DMRestoreLocalVector(da,&localF));
592   PetscCall(DMRestoreLocalVector(da,&localU));
593   PetscCall(PetscLogFlops(16.0*xm*ym));
594   PetscFunctionReturn(0);
595 }
596 
/*
  Right-hand side evaluation, annotated with ADOL-C so the computation is
  recorded on tape 1. Called once from main to write the tape that
  RHSJacobianAdolc later differentiates.

  Input parameters:
    ts    - timestepping context (provides the DMDA)
    ftime - current time (unused in the RHS itself)
    U     - current state
  Output parameter:
    F     - right-hand side vector
    ptr   - AppCtx with problem parameters
*/
PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym,Mx,My;
  PetscReal      hx,hy,sx,sy;
  AField         **f_a,*f_c,**u_a,*u_c;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  Field          **u,**f;
  Vec            localU,localF;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* Inverse-square grid spacings for the 2.5 x 2.5 domain */
  hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMGetLocalVector(da,&localF));

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
  PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
  PetscCall(DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF));
  PetscCall(DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,localF,&f));

  /*
     Get local and ghosted grid boundaries
  */
  PetscCall(DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL));
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[gxm*gym];
  f_c = new AField[gxm*gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[gym];
  f_a = new AField*[gym];

  /* Align indices between array types to endow 2d array with ghost points */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  /*
     Compute function over the locally owned part of the grid
  */
  trace_on(1);  // ----------------------------------------------- Start of active section

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /*
    Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc          = u_a[j][i].u;
      uxx         = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy         = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc          = u_a[j][i].v;
      vxx         = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy         = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
      f_a[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
    }
  }
  /*
    Mark dependence

    NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
          during Jacobian assembly.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      f_a[j][i].u >>= f[j][i].u;
      f_a[j][i].v >>= f[j][i].v;
    }
  }
  trace_off();  // ----------------------------------------------- End of active section

  /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
//  if (appctx->adctx->zos) {
//    PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
//  }

  /*
     Gather global vector, using the 2-step process
        DMLocalToGlobalBegin(),DMLocalToGlobalEnd().

     NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
               DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  */
  PetscCall(DMLocalToGlobalBegin(da,localF,ADD_VALUES,F));
  PetscCall(DMLocalToGlobalEnd(da,localF,ADD_VALUES,F));

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da,localF,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localF));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Destroy AFields appropriately: undo the row-index shift applied by
     GiveGhostPoints so delete[] receives the pointers returned by new */
  f_a += gys;
  u_a += gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;

  PetscFunctionReturn(0);
}
739 
740 PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
741 {
742   AppCtx            *appctx = (AppCtx*)ctx;
743   DM                da;
744   const PetscScalar *u_vec;
745   Vec               localU;
746 
747   PetscFunctionBegin;
748   PetscCall(TSGetDM(ts,&da));
749   PetscCall(DMGetLocalVector(da,&localU));
750 
751   /*
752      Scatter ghost points to local vector,using the 2-step process
753         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
754      By placing code between these two statements, computations can be
755      done while messages are in transition.
756   */
757   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
758   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
759 
760   /* Get pointers to vector data */
761   PetscCall(VecGetArrayRead(localU,&u_vec));
762 
763   /*
764     Compute Jacobian
765   */
766   PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1,A,u_vec,a,appctx->adctx));
767 
768   /*
769      Restore vectors
770   */
771   PetscCall(VecRestoreArrayRead(localU,&u_vec));
772   PetscCall(DMRestoreLocalVector(da,&localU));
773   PetscFunctionReturn(0);
774 }
775 
776 PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
777 {
778   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
779   DM             da;
780   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
781   PetscReal      hx,hy,sx,sy;
782   PetscScalar    uc,vc;
783   Field          **u;
784   Vec            localU;
785   MatStencil     stencil[6],rowstencil;
786   PetscScalar    entries[6];
787 
788   PetscFunctionBegin;
789   PetscCall(TSGetDM(ts,&da));
790   PetscCall(DMGetLocalVector(da,&localU));
791   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
792 
793   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
794   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
795 
796   /*
797      Scatter ghost points to local vector,using the 2-step process
798         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
799      By placing code between these two statements, computations can be
800      done while messages are in transition.
801   */
802   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
803   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
804 
805   /*
806      Get pointers to vector data
807   */
808   PetscCall(DMDAVecGetArrayRead(da,localU,&u));
809 
810   /*
811      Get local grid boundaries
812   */
813   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
814 
815   stencil[0].k = 0;
816   stencil[1].k = 0;
817   stencil[2].k = 0;
818   stencil[3].k = 0;
819   stencil[4].k = 0;
820   stencil[5].k = 0;
821   rowstencil.k = 0;
822   /*
823      Compute function over the locally owned part of the grid
824   */
825   for (j=ys; j<ys+ym; j++) {
826 
827     stencil[0].j = j-1;
828     stencil[1].j = j+1;
829     stencil[2].j = j;
830     stencil[3].j = j;
831     stencil[4].j = j;
832     stencil[5].j = j;
833     rowstencil.k = 0; rowstencil.j = j;
834     for (i=xs; i<xs+xm; i++) {
835       uc = u[j][i].u;
836       vc = u[j][i].v;
837 
838       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
839       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
840 
841       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
842       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
843        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
844 
845       stencil[0].i = i; stencil[0].c = 0; entries[0] = -appctx->D1*sy;
846       stencil[1].i = i; stencil[1].c = 0; entries[1] = -appctx->D1*sy;
847       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = -appctx->D1*sx;
848       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = -appctx->D1*sx;
849       stencil[4].i = i; stencil[4].c = 0; entries[4] = 2.0*appctx->D1*(sx + sy) + vc*vc + appctx->gamma + a;
850       stencil[5].i = i; stencil[5].c = 1; entries[5] = 2.0*uc*vc;
851       rowstencil.i = i; rowstencil.c = 0;
852 
853       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
854       if (appctx->aijpc) {
855         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
856       }
857       stencil[0].c = 1; entries[0] = -appctx->D2*sy;
858       stencil[1].c = 1; entries[1] = -appctx->D2*sy;
859       stencil[2].c = 1; entries[2] = -appctx->D2*sx;
860       stencil[3].c = 1; entries[3] = -appctx->D2*sx;
861       stencil[4].c = 1; entries[4] = 2.0*appctx->D2*(sx + sy) - 2.0*uc*vc + appctx->gamma + appctx->kappa + a;
862       stencil[5].c = 0; entries[5] = -vc*vc;
863 
864       PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
865       if (appctx->aijpc) {
866         PetscCall(MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
867       }
868       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
869     }
870   }
871 
872   /*
873      Restore vectors
874   */
875   PetscCall(PetscLogFlops(19.0*xm*ym));
876   PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
877   PetscCall(DMRestoreLocalVector(da,&localU));
878   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
879   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
880   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
881   if (appctx->aijpc) {
882     PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
883     PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
884     PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
885   }
886   PetscFunctionReturn(0);
887 }
888 
/*
  Hand-coded Jacobian of the reaction-diffusion right-hand side dU/dt = RHS(U).
  Each grid point contributes two rows (components c=0 for u, c=1 for v), each
  with a 6-entry stencil: the four Laplacian neighbours, the diagonal, and the
  cross-coupling to the other component at the same point.

  Input Parameters:
  ts  - timestepping context
  t   - current time (unused: the RHS is autonomous)
  U   - global state vector
  ctx - user-defined application context (AppCtx)

  Output Parameters:
  A - Jacobian matrix
  B - preconditioner matrix; assembled separately only when -aijpc is set
*/
PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
{
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,vc;
  Field          **u;
  Vec            localU;
  MatStencil     stencil[6],rowstencil;
  PetscScalar    entries[6];

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMGetLocalVector(da,&localU));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  /* Mesh spacings on the 2.5 x 2.5 periodic domain and inverse-square factors for the 5-point Laplacian */
  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* 2D problem: the k index of every stencil entry is identically zero */
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {

    /* Entries 0/1 are the y-neighbours; 2/3 the x-neighbours (set per i below);
       4 the diagonal; 5 the cross-coupling to the other component */
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.k = 0; rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /* Derivatives of the u equation, cf.
      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;

      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/

      stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i; rowstencil.c = 0;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));

      /* Derivatives of the v equation; only the component indices change,
         so the i/j offsets set above are reused */
      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      PetscCall(MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES));
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  /*
     Restore vectors
  */
  PetscCall(PetscLogFlops(19.0*xm*ym));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMRestoreLocalVector(da,&localU));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  if (appctx->aijpc) {
    PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
    PetscCall(MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}
998 
999 PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
1000 {
1001   AppCtx         *appctx = (AppCtx*)ctx;
1002   DM             da;
1003   PetscScalar    *u_vec;
1004   Vec            localU;
1005 
1006   PetscFunctionBegin;
1007   PetscCall(TSGetDM(ts,&da));
1008   PetscCall(DMGetLocalVector(da,&localU));
1009 
1010   /*
1011      Scatter ghost points to local vector,using the 2-step process
1012         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
1013      By placing code between these two statements, computations can be
1014      done while messages are in transition.
1015   */
1016   PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
1017   PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));
1018 
1019   /* Get pointers to vector data */
1020   PetscCall(VecGetArray(localU,&u_vec));
1021 
1022   /*
1023     Compute Jacobian
1024   */
1025   PetscCall(PetscAdolcComputeRHSJacobianLocal(1,A,u_vec,appctx->adctx));
1026 
1027   /*
1028      Restore vectors
1029   */
1030   PetscCall(VecRestoreArray(localU,&u_vec));
1031   PetscCall(DMRestoreLocalVector(da,&localU));
1032   PetscFunctionReturn(0);
1033 }
1034 
1035 /*TEST
1036 
1037   build:
1038     requires: double !complex adolc colpack
1039 
1040   test:
1041     suffix: 1
1042     nsize: 1
1043     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1044     output_file: output/adr_ex5adj_1.out
1045 
1046   test:
1047     suffix: 2
1048     nsize: 1
1049     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1050     output_file: output/adr_ex5adj_2.out
1051 
1052   test:
1053     suffix: 3
1054     nsize: 4
1055     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1056     output_file: output/adr_ex5adj_3.out
1057 
1058   test:
1059     suffix: 4
1060     nsize: 4
1061     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1062     output_file: output/adr_ex5adj_4.out
1063 
1064   testset:
1065     suffix: 5
1066     nsize: 4
1067     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1068     output_file: output/adr_ex5adj_5.out
1069 
1070   testset:
1071     suffix: 6
1072     nsize: 4
1073     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1074     output_file: output/adr_ex5adj_6.out
1075 
1076 TEST*/
1077