xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj_mf.c (revision 2f613bf53f46f9356e00a2ca2bd69453be72fc31) !
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
6   It computes the sensitivity of a component in the final solution, which locates in the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 
15 #include "reaction_diffusion.h"
16 #include <petscdm.h>
17 #include <petscdmda.h>
18 
/* Matrix free support: context carried by the MATSHELL Jacobian.
   The shell "Jacobian evaluation" callbacks (RHSJacobianShell/IJacobianShell)
   only cache the current state here; the MatMult/MatMultTranspose shell
   operations then apply the Jacobian action on the fly from this data. */
typedef struct {
  PetscReal time;   /* current time; not read by the shell operations below */
  Vec       U;      /* cached state at which the Jacobian action is evaluated */
  Vec       Udot;   /* cached time derivative; currently unused (see commented code) */
  PetscReal shift;  /* IJacobian shift a, so the implicit operator is a*I - J */
  AppCtx*   appctx; /* problem parameters D1,D2,gamma,kappa from reaction_diffusion.h */
  TS        ts;     /* owning TS, used to retrieve the DMDA */
} MCtx;
28 
29 /*
30    User-defined routines
31 */
32 PetscErrorCode InitialConditions(DM,Vec);
33 PetscErrorCode RHSJacobianShell(TS,PetscReal,Vec,Mat,Mat,void*);
34 PetscErrorCode IJacobianShell(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
35 
36 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
37 {
38    PetscInt i,j,Mx,My,xs,ys,xm,ym;
39    PetscErrorCode ierr;
40    Field **l;
41    PetscFunctionBegin;
42 
43    ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
44    /* locate the global i index for x and j index for y */
45    i = (PetscInt)(x*(Mx-1));
46    j = (PetscInt)(y*(My-1));
47    ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
48 
49    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
50      /* the i,j vertex is on this process */
51      ierr = DMDAVecGetArray(da,lambda,&l);CHKERRQ(ierr);
52      l[j][i].u = 1.0;
53      l[j][i].v = 1.0;
54      ierr = DMDAVecRestoreArray(da,lambda,&l);CHKERRQ(ierr);
55    }
56    PetscFunctionReturn(0);
57 }
58 
/*
   Shell-matrix transpose action Y = J^T X, where J = dF/dU is the Jacobian of the
   reaction-diffusion RHS (see ex5.c) evaluated at the state mctx->U that was cached
   by RHSJacobianShell(). TSAdjointSolve() only needs this transpose action for the
   explicit (RHS) formulation, so no Jacobian matrix is ever assembled.
*/
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A_shell,&mctx);CHKERRQ(ierr);
  appctx = mctx->appctx;
  ierr = TSGetDM(mctx->ts,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  /* mesh spacing on the 2.5 x 2.5 periodic domain; sx,sy = 1/h^2 stencil weights */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);

  /* Get pointers to vector data: x = input (adjoint) vector, u = cached state, y = output */
  ierr = DMDAVecGetArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,Y,&y);CHKERRQ(ierr);

  /* Get local grid boundaries */
  ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;                                /* state at which J is linearized */
      ucb       = x[j][i].u;                                /* input ("bar") values being multiplied */
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx; /* 5-point Laplacian; diffusion block is symmetric */
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* reaction terms appear transposed relative to RHSJacobian: the coupling
         coefficients (vc*vc and -2*uc*vc) are swapped between the two rows */
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
    }
  }
  ierr = DMDAVecRestoreArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,Y,&y);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
116 
/*
   Shell-matrix transpose action for the implicit form: Y = (shift*I - J)^T X,
   where J = dF/dU is the RHS Jacobian evaluated at the cached state mctx->U and
   shift is the IJacobian shift cached by IJacobianShell(). The body first forms
   J^T X (identical stencil to MyRHSMatMultTranspose) and then applies the
   shift-and-negate in place. Used by the adjoint of implicit integrators (TSCN).
*/
static PetscErrorCode MyIMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A_shell,&mctx);CHKERRQ(ierr);
  appctx = mctx->appctx;
  ierr = TSGetDM(mctx->ts,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  /* mesh spacing on the 2.5 x 2.5 periodic domain; sx,sy = 1/h^2 stencil weights */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);

  /* Get pointers to vector data: x = input vector, u = cached state, y = output */
  ierr = DMDAVecGetArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,Y,&y);CHKERRQ(ierr);

  /* Get local grid boundaries */
  ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;
      ucb       = x[j][i].u;
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* y = J^T x (same transposed reaction coupling as MyRHSMatMultTranspose) */
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
      /* overwrite with y = shift*x - y, i.e. (shift*I - J)^T x */
      y[j][i].u = mctx->shift*ucb - y[j][i].u;
      y[j][i].v = mctx->shift*vcb - y[j][i].v;
    }
  }
  ierr = DMDAVecRestoreArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,Y,&y);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
176 
/*
   Shell-matrix action for the implicit form: Y = (shift*I - J) X (NOT transposed),
   where J = dF/dU is the RHS Jacobian at the cached state mctx->U and shift is the
   IJacobian shift cached by IJacobianShell(). Unlike the *Transpose routines, the
   reaction coupling here matches the Jacobian rows directly (vc*vc appears in the
   v-row, -2*uc*vc in the u-row). Needed by implicit solvers during the forward run.
*/
static PetscErrorCode MyIMatMult(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = MatShellGetContext(A_shell,&mctx);CHKERRQ(ierr);
  appctx = mctx->appctx;
  ierr = TSGetDM(mctx->ts,&da);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  /* mesh spacing on the 2.5 x 2.5 periodic domain; sx,sy = 1/h^2 stencil weights */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);

  /* Get pointers to vector data: x = input vector, u = cached state, y = output */
  ierr = DMDAVecGetArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,Y,&y);CHKERRQ(ierr);

  /* Get local grid boundaries */
  ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;
      ucb       = x[j][i].u;
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* y = J x: untransposed Jacobian rows of the Gray-Scott reaction terms */
      y[j][i].u = appctx->D1*(uxx + uyy) - (vc*vc+appctx->gamma)*ucb - 2.0*uc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) + vc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
      /* overwrite with y = shift*x - y, i.e. (shift*I - J) x */
      y[j][i].u = mctx->shift*ucb - y[j][i].u;
      y[j][i].v = mctx->shift*vcb - y[j][i].v;
    }
  }
  ierr = DMDAVecRestoreArrayRead(da,localX,&x);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(da,mctx->U,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,Y,&y);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
236 
237 int main(int argc,char **argv)
238 {
239   TS             ts;                  /* ODE integrator */
240   Vec            x;                   /* solution */
241   PetscErrorCode ierr;
242   DM             da;
243   AppCtx         appctx;
244   MCtx           mctx;
245   Vec            lambda[1];
246   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_TRUE,mf = PETSC_FALSE;
247   PetscLogDouble v1,v2;
248 #if defined(PETSC_USE_LOG)
249   PetscLogStage  stage;
250 #endif
251 
252   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
253   ierr = PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);CHKERRQ(ierr);
254   ierr = PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);CHKERRQ(ierr);
255   appctx.aijpc = PETSC_FALSE;
256   ierr = PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL);CHKERRQ(ierr);
257   ierr = PetscOptionsGetBool(NULL,NULL,"-mf",&mf,NULL);CHKERRQ(ierr);
258 
259   appctx.D1    = 8.0e-5;
260   appctx.D2    = 4.0e-5;
261   appctx.gamma = .024;
262   appctx.kappa = .06;
263 
264   PetscLogStageRegister("MyAdjoint", &stage);
265   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
266      Create distributed array (DMDA) to manage parallel grid and vectors
267   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
268   ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
269   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
270   ierr = DMSetUp(da);CHKERRQ(ierr);
271   ierr = DMDASetFieldName(da,0,"u");CHKERRQ(ierr);
272   ierr = DMDASetFieldName(da,1,"v");CHKERRQ(ierr);
273 
274   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
275      Extract global vectors from DMDA; then duplicate for remaining
276      vectors that are the same types
277    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
278   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
279 
280   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
281      Create timestepping solver context
282      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
283   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
284   ierr = TSSetDM(ts,da);CHKERRQ(ierr);
285   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
286   ierr = TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT);CHKERRQ(ierr); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
287   if (!implicitform) {
288     ierr = TSSetType(ts,TSRK);CHKERRQ(ierr);
289     ierr = TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr);
290     ierr = TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);CHKERRQ(ierr);
291   } else {
292     ierr = TSSetType(ts,TSCN);CHKERRQ(ierr);
293     ierr = TSSetIFunction(ts,NULL,IFunction,&appctx);CHKERRQ(ierr);
294     if (appctx.aijpc) {
295       Mat                    A,B;
296 
297       ierr = DMSetMatType(da,MATSELL);CHKERRQ(ierr);
298       ierr = DMCreateMatrix(da,&A);CHKERRQ(ierr);
299       ierr = MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
300       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
301       ierr = TSSetIJacobian(ts,A,B,IJacobian,&appctx);CHKERRQ(ierr);
302       ierr = MatDestroy(&A);CHKERRQ(ierr);
303       ierr = MatDestroy(&B);CHKERRQ(ierr);
304     } else {
305       ierr = TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx);CHKERRQ(ierr);
306     }
307   }
308 
309   if (mf) {
310     PetscInt xm,ym,Mx,My,dof;
311     mctx.ts = ts;
312     mctx.appctx = &appctx;
313     ierr = VecDuplicate(x,&mctx.U);CHKERRQ(ierr);
314     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
315       Create matrix free context
316       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
317     ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&dof,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
318     ierr = DMDAGetCorners(da,NULL,NULL,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
319     ierr = MatCreateShell(PETSC_COMM_WORLD,dof*xm*ym,PETSC_DETERMINE,dof*Mx*My,dof*Mx*My,&mctx,&appctx.A);CHKERRQ(ierr);
320     ierr = MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyRHSMatMultTranspose);CHKERRQ(ierr);
321     if (!implicitform) { /* for explicit methods only */
322       ierr = TSSetRHSJacobian(ts,appctx.A,appctx.A,RHSJacobianShell,&appctx);CHKERRQ(ierr);
323     } else {
324       /* ierr = VecDuplicate(appctx.U,&mctx.Udot);CHKERRQ(ierr); */
325       ierr = MatShellSetOperation(appctx.A,MATOP_MULT,(void (*)(void))MyIMatMult);CHKERRQ(ierr);
326       ierr = MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyIMatMultTranspose);CHKERRQ(ierr);
327       ierr = TSSetIJacobian(ts,appctx.A,appctx.A,IJacobianShell,&appctx);CHKERRQ(ierr);
328     }
329   }
330 
331   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
332      Set initial conditions
333    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
334   ierr = InitialConditions(da,x);CHKERRQ(ierr);
335   ierr = TSSetSolution(ts,x);CHKERRQ(ierr);
336 
337   /*
338     Have the TS save its trajectory so that TSAdjointSolve() may be used
339   */
340   if (!forwardonly) { ierr = TSSetSaveTrajectory(ts);CHKERRQ(ierr); }
341 
342   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
343      Set solver options
344    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
345   ierr = TSSetMaxTime(ts,200.0);CHKERRQ(ierr);
346   ierr = TSSetTimeStep(ts,0.5);CHKERRQ(ierr);
347   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr);
348   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
349 
350   ierr = PetscTime(&v1);CHKERRQ(ierr);
351   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
352      Solve ODE system
353      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
354   ierr = TSSolve(ts,x);CHKERRQ(ierr);
355   if (!forwardonly) {
356     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
357        Start the Adjoint model
358        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
359     ierr = VecDuplicate(x,&lambda[0]);CHKERRQ(ierr);
360     /*   Reset initial conditions for the adjoint integration */
361     ierr = InitializeLambda(da,lambda[0],0.5,0.5);CHKERRQ(ierr);
362     ierr = TSSetCostGradients(ts,1,lambda,NULL);CHKERRQ(ierr);
363     PetscLogStagePush(stage);
364     ierr = TSAdjointSolve(ts);CHKERRQ(ierr);
365     PetscLogStagePop();
366     ierr = VecDestroy(&lambda[0]);CHKERRQ(ierr);
367   }
368   ierr = PetscTime(&v2);CHKERRQ(ierr);
369   ierr = PetscPrintf(PETSC_COMM_WORLD," %.3lf ",v2-v1);
370 
371   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
372      Free work space.  All PETSc objects should be destroyed when they
373      are no longer needed.
374    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
375   ierr = VecDestroy(&x);CHKERRQ(ierr);
376   ierr = TSDestroy(&ts);CHKERRQ(ierr);
377   ierr = DMDestroy(&da);CHKERRQ(ierr);
378   if (mf) {
379     ierr = VecDestroy(&mctx.U);CHKERRQ(ierr);
380     /* ierr = VecDestroy(&mctx.Udot);CHKERRQ(ierr);*/
381     ierr = MatDestroy(&appctx.A);CHKERRQ(ierr);
382   }
383   ierr = PetscFinalize();
384   return ierr;
385 }
386 
387 /* ------------------------------------------------------------------- */
388 PetscErrorCode RHSJacobianShell(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
389 {
390   MCtx           *mctx;
391   PetscErrorCode ierr;
392 
393   PetscFunctionBegin;
394   ierr = MatShellGetContext(A,&mctx);CHKERRQ(ierr);
395   ierr = VecCopy(U,mctx->U);CHKERRQ(ierr);
396   PetscFunctionReturn(0);
397 }
398 
399 PetscErrorCode IJacobianShell(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat BB,void *ctx)
400 {
401   MCtx           *mctx;
402   PetscErrorCode ierr;
403 
404   PetscFunctionBegin;
405   ierr = MatShellGetContext(A,&mctx);CHKERRQ(ierr);
406   ierr = VecCopy(U,mctx->U);CHKERRQ(ierr);
407   /* ierr = VecCopy(Udot,mctx->Udot);CHKERRQ(ierr); */
408   mctx->shift = a;
409   PetscFunctionReturn(0);
410 }
411 
412 /* ------------------------------------------------------------------- */
413 PetscErrorCode InitialConditions(DM da,Vec U)
414 {
415   PetscErrorCode ierr;
416   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
417   Field          **u;
418   PetscReal      hx,hy,x,y;
419 
420   PetscFunctionBegin;
421   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
422 
423   hx = 2.5/(PetscReal)Mx;
424   hy = 2.5/(PetscReal)My;
425 
426   /*
427      Get pointers to vector data
428   */
429   ierr = DMDAVecGetArray(da,U,&u);CHKERRQ(ierr);
430 
431   /*
432      Get local grid boundaries
433   */
434   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
435 
436   /*
437      Compute function over the locally owned part of the grid
438   */
439   for (j=ys; j<ys+ym; j++) {
440     y = j*hy;
441     for (i=xs; i<xs+xm; i++) {
442       x = i*hx;
443       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
444       else u[j][i].v = 0.0;
445 
446       u[j][i].u = 1.0 - 2.0*u[j][i].v;
447     }
448   }
449 
450   /*
451      Restore vectors
452   */
453   ierr = DMDAVecRestoreArray(da,U,&u);CHKERRQ(ierr);
454   PetscFunctionReturn(0);
455 }
456 
457 /*TEST
458 
459    build:
460       depends: reaction_diffusion.c
461       requires: !complex !single
462 
463    test:
464       requires: cams
465       args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams
466 TEST*/
467