xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj_mf.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
5   This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
6   It computes the sensitivity of a component in the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 
15 #include "reaction_diffusion.h"
16 #include <petscdm.h>
17 #include <petscdmda.h>
18 
19 /* Matrix free support */
/* Context carried by the matrix-free shell operators; it caches the state at
   which the Jacobian action is evaluated (set by the *JacobianShell callbacks). */
typedef struct {
  PetscReal time;   /* currently unused; reserved for time-dependent Jacobians */
  Vec       U;      /* frozen state U at which J(U) X and J(U)^T X are applied */
  Vec       Udot;   /* currently unused (see commented-out code below) */
  PetscReal shift;  /* integrator shift a in the IJacobian shift*I - J */
  AppCtx*   appctx; /* model coefficients D1, D2, gamma, kappa */
  TS        ts;     /* time stepper, used to retrieve the DMDA */
} MCtx;
28 
29 /*
30    User-defined routines
31 */
32 PetscErrorCode InitialConditions(DM,Vec);
33 PetscErrorCode RHSJacobianShell(TS,PetscReal,Vec,Mat,Mat,void*);
34 PetscErrorCode IJacobianShell(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
35 
36 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
37 {
38    PetscInt i,j,Mx,My,xs,ys,xm,ym;
39    Field **l;
40    PetscFunctionBegin;
41 
42    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
43    /* locate the global i index for x and j index for y */
44    i = (PetscInt)(x*(Mx-1));
45    j = (PetscInt)(y*(My-1));
46    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
47 
48    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
49      /* the i,j vertex is on this process */
50      PetscCall(DMDAVecGetArray(da,lambda,&l));
51      l[j][i].u = 1.0;
52      l[j][i].v = 1.0;
53      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
54    }
55    PetscFunctionReturn(0);
56 }
57 
/*
   Matrix-free transposed Jacobian action Y = J^T X, where J is the Jacobian
   of the Gray-Scott RHS evaluated at the frozen state mctx->U (cached by
   RHSJacobianShell). Used by the adjoint solver with explicit time stepping.
   The Laplacian stencil is self-adjoint; only the reaction terms transpose.
*/
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* grid spacings of the 2.5 x 2.5 periodic domain and the 1/h^2 stencil factors */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector,using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data: x is the input (with ghosts), u the frozen state, y the output */
  PetscCall(DMDAVecGetArrayRead(da,localX,&x));
  PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;              /* state components */
      ucb       = x[j][i].u;              /* input (adjoint) components */
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* transposed reaction coupling: off-diagonal blocks of J are swapped
         relative to the forward action in MyIMatMult */
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
  PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecRestoreArray(da,Y,&y));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}
114 
115 static PetscErrorCode MyIMatMultTranspose(Mat A_shell,Vec X,Vec Y)
116 {
117   MCtx           *mctx;
118   AppCtx         *appctx;
119   DM             da;
120   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
121   PetscReal      hx,hy,sx,sy;
122   PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
123   Field          **u,**x,**y;
124   Vec            localX;
125 
126   PetscFunctionBeginUser;
127   PetscCall(MatShellGetContext(A_shell,&mctx));
128   appctx = mctx->appctx;
129   PetscCall(TSGetDM(mctx->ts,&da));
130   PetscCall(DMGetLocalVector(da,&localX));
131   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
132   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
133   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
134 
135   /* Scatter ghost points to local vector,using the 2-step process
136      DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
137      By placing code between these two statements, computations can be
138      done while messages are in transition. */
139   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
140   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
141 
142   /* Get pointers to vector data */
143   PetscCall(DMDAVecGetArrayRead(da,localX,&x));
144   PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
145   PetscCall(DMDAVecGetArray(da,Y,&y));
146 
147   /* Get local grid boundaries */
148   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
149 
150   /* Compute function over the locally owned part of the grid */
151   for (j=ys; j<ys+ym; j++) {
152     for (i=xs; i<xs+xm; i++) {
153       uc        = u[j][i].u;
154       ucb       = x[j][i].u;
155       uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
156       uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
157       vc        = u[j][i].v;
158       vcb       = x[j][i].v;
159       vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
160       vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
161       y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
162       y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
163       y[j][i].u = mctx->shift*ucb - y[j][i].u;
164       y[j][i].v = mctx->shift*vcb - y[j][i].v;
165     }
166   }
167   PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
168   PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
169   PetscCall(DMDAVecRestoreArray(da,Y,&y));
170   PetscCall(DMRestoreLocalVector(da,&localX));
171   PetscFunctionReturn(0);
172 }
173 
174 static PetscErrorCode MyIMatMult(Mat A_shell,Vec X,Vec Y)
175 {
176   MCtx           *mctx;
177   AppCtx         *appctx;
178   DM             da;
179   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
180   PetscReal      hx,hy,sx,sy;
181   PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
182   Field          **u,**x,**y;
183   Vec            localX;
184 
185   PetscFunctionBeginUser;
186   PetscCall(MatShellGetContext(A_shell,&mctx));
187   appctx = mctx->appctx;
188   PetscCall(TSGetDM(mctx->ts,&da));
189   PetscCall(DMGetLocalVector(da,&localX));
190   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
191   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
192   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
193 
194   /* Scatter ghost points to local vector,using the 2-step process
195      DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
196      By placing code between these two statements, computations can be
197      done while messages are in transition. */
198   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
199   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
200 
201   /* Get pointers to vector data */
202   PetscCall(DMDAVecGetArrayRead(da,localX,&x));
203   PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
204   PetscCall(DMDAVecGetArray(da,Y,&y));
205 
206   /* Get local grid boundaries */
207   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
208 
209   /* Compute function over the locally owned part of the grid */
210   for (j=ys; j<ys+ym; j++) {
211     for (i=xs; i<xs+xm; i++) {
212       uc        = u[j][i].u;
213       ucb       = x[j][i].u;
214       uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
215       uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
216       vc        = u[j][i].v;
217       vcb       = x[j][i].v;
218       vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
219       vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
220       y[j][i].u = appctx->D1*(uxx + uyy) - (vc*vc+appctx->gamma)*ucb - 2.0*uc*vc*vcb;
221       y[j][i].v = appctx->D2*(vxx + vyy) + vc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
222       y[j][i].u = mctx->shift*ucb - y[j][i].u;
223       y[j][i].v = mctx->shift*vcb - y[j][i].v;
224     }
225   }
226   PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
227   PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
228   PetscCall(DMDAVecRestoreArray(da,Y,&y));
229   PetscCall(DMRestoreLocalVector(da,&localX));
230   PetscFunctionReturn(0);
231 }
232 
233 int main(int argc,char **argv)
234 {
235   TS             ts;                  /* ODE integrator */
236   Vec            x;                   /* solution */
237   DM             da;
238   AppCtx         appctx;
239   MCtx           mctx;
240   Vec            lambda[1];
241   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_TRUE,mf = PETSC_FALSE;
242   PetscLogDouble v1,v2;
243 #if defined(PETSC_USE_LOG)
244   PetscLogStage  stage;
245 #endif
246 
247   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
248   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
249   PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
250   appctx.aijpc = PETSC_FALSE;
251   PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
252   PetscCall(PetscOptionsGetBool(NULL,NULL,"-mf",&mf,NULL));
253 
254   appctx.D1    = 8.0e-5;
255   appctx.D2    = 4.0e-5;
256   appctx.gamma = .024;
257   appctx.kappa = .06;
258 
259   PetscLogStageRegister("MyAdjoint", &stage);
260   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
261      Create distributed array (DMDA) to manage parallel grid and vectors
262   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
263   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
264   PetscCall(DMSetFromOptions(da));
265   PetscCall(DMSetUp(da));
266   PetscCall(DMDASetFieldName(da,0,"u"));
267   PetscCall(DMDASetFieldName(da,1,"v"));
268 
269   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
270      Extract global vectors from DMDA; then duplicate for remaining
271      vectors that are the same types
272    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
273   PetscCall(DMCreateGlobalVector(da,&x));
274 
275   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
276      Create timestepping solver context
277      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
278   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
279   PetscCall(TSSetDM(ts,da));
280   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
281   PetscCall(TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
282   if (!implicitform) {
283     PetscCall(TSSetType(ts,TSRK));
284     PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
285     PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));
286   } else {
287     PetscCall(TSSetType(ts,TSCN));
288     PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
289     if (appctx.aijpc) {
290       Mat                    A,B;
291 
292       PetscCall(DMSetMatType(da,MATSELL));
293       PetscCall(DMCreateMatrix(da,&A));
294       PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
295       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
296       PetscCall(TSSetIJacobian(ts,A,B,IJacobian,&appctx));
297       PetscCall(MatDestroy(&A));
298       PetscCall(MatDestroy(&B));
299     } else {
300       PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx));
301     }
302   }
303 
304   if (mf) {
305     PetscInt xm,ym,Mx,My,dof;
306     mctx.ts = ts;
307     mctx.appctx = &appctx;
308     PetscCall(VecDuplicate(x,&mctx.U));
309     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
310       Create matrix free context
311       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
312     PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&dof,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
313     PetscCall(DMDAGetCorners(da,NULL,NULL,NULL,&xm,&ym,NULL));
314     PetscCall(MatCreateShell(PETSC_COMM_WORLD,dof*xm*ym,PETSC_DETERMINE,dof*Mx*My,dof*Mx*My,&mctx,&appctx.A));
315     PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyRHSMatMultTranspose));
316     if (!implicitform) { /* for explicit methods only */
317       PetscCall(TSSetRHSJacobian(ts,appctx.A,appctx.A,RHSJacobianShell,&appctx));
318     } else {
319       /* PetscCall(VecDuplicate(appctx.U,&mctx.Udot)); */
320       PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT,(void (*)(void))MyIMatMult));
321       PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyIMatMultTranspose));
322       PetscCall(TSSetIJacobian(ts,appctx.A,appctx.A,IJacobianShell,&appctx));
323     }
324   }
325 
326   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
327      Set initial conditions
328    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
329   PetscCall(InitialConditions(da,x));
330   PetscCall(TSSetSolution(ts,x));
331 
332   /*
333     Have the TS save its trajectory so that TSAdjointSolve() may be used
334   */
335   if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));
336 
337   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
338      Set solver options
339    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
340   PetscCall(TSSetMaxTime(ts,200.0));
341   PetscCall(TSSetTimeStep(ts,0.5));
342   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP));
343   PetscCall(TSSetFromOptions(ts));
344 
345   PetscCall(PetscTime(&v1));
346   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
347      Solve ODE system
348      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
349   PetscCall(TSSolve(ts,x));
350   if (!forwardonly) {
351     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
352        Start the Adjoint model
353        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
354     PetscCall(VecDuplicate(x,&lambda[0]));
355     /*   Reset initial conditions for the adjoint integration */
356     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
357     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
358     PetscLogStagePush(stage);
359     PetscCall(TSAdjointSolve(ts));
360     PetscLogStagePop();
361     PetscCall(VecDestroy(&lambda[0]));
362   }
363   PetscCall(PetscTime(&v2));
364   PetscCall(PetscPrintf(PETSC_COMM_WORLD," %.3lf ",v2-v1));
365 
366   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
367      Free work space.  All PETSc objects should be destroyed when they
368      are no longer needed.
369    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
370   PetscCall(VecDestroy(&x));
371   PetscCall(TSDestroy(&ts));
372   PetscCall(DMDestroy(&da));
373   if (mf) {
374     PetscCall(VecDestroy(&mctx.U));
375     /* PetscCall(VecDestroy(&mctx.Udot));*/
376     PetscCall(MatDestroy(&appctx.A));
377   }
378   PetscCall(PetscFinalize());
379   return 0;
380 }
381 
382 /* ------------------------------------------------------------------- */
/*
   Shell "Jacobian evaluation" for the explicit (RHS) form: nothing is
   assembled; the current state U is cached in the shell context so the
   matrix-free MatMult callbacks can apply the Jacobian at this state.
   t, BB and ctx are unused but required by the TSRHSJacobian interface.
*/
PetscErrorCode RHSJacobianShell(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
{
  MCtx           *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A,&mctx));
  /* freeze the state at which Jacobian-vector products will be evaluated */
  PetscCall(VecCopy(U,mctx->U));
  PetscFunctionReturn(0);
}
392 
/*
   Shell "Jacobian evaluation" for the implicit (IFunction) form: caches the
   current state U and the integrator shift a in the shell context; the
   matrix-free callbacks then apply (a*I - J) and its transpose at this state.
   t, Udot, BB and ctx are unused but required by the TSIJacobian interface.
*/
PetscErrorCode IJacobianShell(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat BB,void *ctx)
{
  MCtx           *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A,&mctx));
  /* freeze the state at which Jacobian-vector products will be evaluated */
  PetscCall(VecCopy(U,mctx->U));
  /* PetscCall(VecCopy(Udot,mctx->Udot)); */
  mctx->shift = a;
  PetscFunctionReturn(0);
}
404 
405 /* ------------------------------------------------------------------- */
406 PetscErrorCode InitialConditions(DM da,Vec U)
407 {
408   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
409   Field          **u;
410   PetscReal      hx,hy,x,y;
411 
412   PetscFunctionBegin;
413   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
414 
415   hx = 2.5/(PetscReal)Mx;
416   hy = 2.5/(PetscReal)My;
417 
418   /*
419      Get pointers to vector data
420   */
421   PetscCall(DMDAVecGetArray(da,U,&u));
422 
423   /*
424      Get local grid boundaries
425   */
426   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
427 
428   /*
429      Compute function over the locally owned part of the grid
430   */
431   for (j=ys; j<ys+ym; j++) {
432     y = j*hy;
433     for (i=xs; i<xs+xm; i++) {
434       x = i*hx;
435       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
436       else u[j][i].v = 0.0;
437 
438       u[j][i].u = 1.0 - 2.0*u[j][i].v;
439     }
440   }
441 
442   /*
443      Restore vectors
444   */
445   PetscCall(DMDAVecRestoreArray(da,U,&u));
446   PetscFunctionReturn(0);
447 }
448 
449 /*TEST
450 
451    build:
452       depends: reaction_diffusion.c
453       requires: !complex !single
454 
455    test:
456       requires: cams
457       args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams
458 TEST*/
459