xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj_mf.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component in the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 
15 #include "reaction_diffusion.h"
16 #include <petscdm.h>
17 #include <petscdmda.h>
18 
19 /* Matrix free support */
typedef struct {
  PetscReal time;   /* current time; unused in this example but kept in the shell context */
  Vec       U;      /* state vector cached by RHSJacobianShell()/IJacobianShell(); the mat-mult
                       routines evaluate the Jacobian action at this state */
  Vec       Udot;   /* time derivative of the state; currently unused (see commented-out code) */
  PetscReal shift;  /* the "a" shift passed to IJacobian; used by the implicit mult routines */
  AppCtx*   appctx; /* application context holding the model coefficients D1, D2, gamma, kappa */
  TS        ts;     /* time stepper; used to retrieve the DMDA in the mult routines */
} MCtx;
28 
29 /*
30    User-defined routines
31 */
32 PetscErrorCode InitialConditions(DM,Vec);
33 PetscErrorCode RHSJacobianShell(TS,PetscReal,Vec,Mat,Mat,void*);
34 PetscErrorCode IJacobianShell(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);
35 
36 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
37 {
38    PetscInt i,j,Mx,My,xs,ys,xm,ym;
39    Field **l;
40    PetscFunctionBegin;
41 
42    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
43    /* locate the global i index for x and j index for y */
44    i = (PetscInt)(x*(Mx-1));
45    j = (PetscInt)(y*(My-1));
46    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
47 
48    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
49      /* the i,j vertex is on this process */
50      PetscCall(DMDAVecGetArray(da,lambda,&l));
51      l[j][i].u = 1.0;
52      l[j][i].v = 1.0;
53      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
54    }
55    PetscFunctionReturn(0);
56 }
57 
/*
   Y = J^T X: action of the transposed Jacobian of the reaction-diffusion
   RHS, evaluated matrix-free at the state cached in mctx->U by
   RHSJacobianShell().  The 5-point Laplacian is symmetric, so only the
   2x2 reaction blocks are transposed relative to the forward product
   (compare with MyIMatMult below).  Needed by TSAdjointSolve() for the
   explicit (RHS) formulation.
*/
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* grid spacing of the 2.5 x 2.5 periodic domain; sx,sy scale the Laplacian */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector,using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da,localX,&x));
  PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;                                 /* state at which J is evaluated */
      ucb       = x[j][i].u;                                 /* input (adjoint) vector entries */
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx; /* symmetric 5-point Laplacian */
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* transposed 2x2 reaction coupling: columns of J become rows here */
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
  PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecRestoreArray(da,Y,&y));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}
114 
115 static PetscErrorCode MyIMatMultTranspose(Mat A_shell,Vec X,Vec Y)
116 {
117   MCtx           *mctx;
118   AppCtx         *appctx;
119   DM             da;
120   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
121   PetscReal      hx,hy,sx,sy;
122   PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
123   Field          **u,**x,**y;
124   Vec            localX;
125 
126   PetscFunctionBeginUser;
127   PetscCall(MatShellGetContext(A_shell,&mctx));
128   appctx = mctx->appctx;
129   PetscCall(TSGetDM(mctx->ts,&da));
130   PetscCall(DMGetLocalVector(da,&localX));
131   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
132   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
133   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
134 
135   /* Scatter ghost points to local vector,using the 2-step process
136      DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
137      By placing code between these two statements, computations can be
138      done while messages are in transition. */
139   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
140   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
141 
142   /* Get pointers to vector data */
143   PetscCall(DMDAVecGetArrayRead(da,localX,&x));
144   PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
145   PetscCall(DMDAVecGetArray(da,Y,&y));
146 
147   /* Get local grid boundaries */
148   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
149 
150   /* Compute function over the locally owned part of the grid */
151   for (j=ys; j<ys+ym; j++) {
152     for (i=xs; i<xs+xm; i++) {
153       uc        = u[j][i].u;
154       ucb       = x[j][i].u;
155       uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
156       uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
157       vc        = u[j][i].v;
158       vcb       = x[j][i].v;
159       vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
160       vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
161       y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
162       y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma - appctx->kappa)*vcb;
163       y[j][i].u = mctx->shift*ucb - y[j][i].u;
164       y[j][i].v = mctx->shift*vcb - y[j][i].v;
165     }
166   }
167   PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
168   PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
169   PetscCall(DMDAVecRestoreArray(da,Y,&y));
170   PetscCall(DMRestoreLocalVector(da,&localX));
171   PetscFunctionReturn(0);
172 }
173 
/*
   Y = (shift*I - J) X: forward action of the IJacobian of the implicit form
   F(t,U,Udot) = Udot - RHS(U) = 0, evaluated matrix-free at the state cached
   in mctx->U and the shift cached by IJacobianShell().  Used by the linear
   solvers during the implicit forward integration.  Note the reaction block
   here is the un-transposed counterpart of MyIMatMultTranspose() above.
*/
static PetscErrorCode MyIMatMult(Mat A_shell,Vec X,Vec Y)
{
  MCtx           *mctx;
  AppCtx         *appctx;
  DM             da;
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      hx,hy,sx,sy;
  PetscScalar    uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field          **u,**x,**y;
  Vec            localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts,&da));
  PetscCall(DMGetLocalVector(da,&localX));
  PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* grid spacing of the 2.5 x 2.5 periodic domain; sx,sy scale the Laplacian */
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);

  /* Scatter ghost points to local vector,using the 2-step process
     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da,localX,&x));
  PetscCall(DMDAVecGetArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;                                 /* state at which J is evaluated */
      ucb       = x[j][i].u;                                 /* input vector entries */
      uxx       = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx; /* 5-point Laplacian of input */
      uyy       = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy       = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* J X: diffusion plus the (un-transposed) 2x2 reaction coupling */
      y[j][i].u = appctx->D1*(uxx + uyy) - (vc*vc+appctx->gamma)*ucb - 2.0*uc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) + vc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
      /* fold in the implicit shift: Y = shift*X - J X */
      y[j][i].u = mctx->shift*ucb - y[j][i].u;
      y[j][i].v = mctx->shift*vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da,localX,&x));
  PetscCall(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  PetscCall(DMDAVecRestoreArray(da,Y,&y));
  PetscCall(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}
232 
233 int main(int argc,char **argv)
234 {
235   TS             ts;                  /* ODE integrator */
236   Vec            x;                   /* solution */
237   DM             da;
238   AppCtx         appctx;
239   MCtx           mctx;
240   Vec            lambda[1];
241   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_TRUE,mf = PETSC_FALSE;
242   PetscLogDouble v1,v2;
243 #if defined(PETSC_USE_LOG)
244   PetscLogStage  stage;
245 #endif
246 
247   PetscFunctionBeginUser;
248   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
249   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
250   PetscCall(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
251   appctx.aijpc = PETSC_FALSE;
252   PetscCall(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
253   PetscCall(PetscOptionsGetBool(NULL,NULL,"-mf",&mf,NULL));
254 
255   appctx.D1    = 8.0e-5;
256   appctx.D2    = 4.0e-5;
257   appctx.gamma = .024;
258   appctx.kappa = .06;
259 
260   PetscLogStageRegister("MyAdjoint", &stage);
261   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
262      Create distributed array (DMDA) to manage parallel grid and vectors
263   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
264   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
265   PetscCall(DMSetFromOptions(da));
266   PetscCall(DMSetUp(da));
267   PetscCall(DMDASetFieldName(da,0,"u"));
268   PetscCall(DMDASetFieldName(da,1,"v"));
269 
270   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
271      Extract global vectors from DMDA; then duplicate for remaining
272      vectors that are the same types
273    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
274   PetscCall(DMCreateGlobalVector(da,&x));
275 
276   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
277      Create timestepping solver context
278      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
279   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
280   PetscCall(TSSetDM(ts,da));
281   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
282   PetscCall(TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
283   if (!implicitform) {
284     PetscCall(TSSetType(ts,TSRK));
285     PetscCall(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
286     PetscCall(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));
287   } else {
288     PetscCall(TSSetType(ts,TSCN));
289     PetscCall(TSSetIFunction(ts,NULL,IFunction,&appctx));
290     if (appctx.aijpc) {
291       Mat                    A,B;
292 
293       PetscCall(DMSetMatType(da,MATSELL));
294       PetscCall(DMCreateMatrix(da,&A));
295       PetscCall(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
296       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
297       PetscCall(TSSetIJacobian(ts,A,B,IJacobian,&appctx));
298       PetscCall(MatDestroy(&A));
299       PetscCall(MatDestroy(&B));
300     } else {
301       PetscCall(TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx));
302     }
303   }
304 
305   if (mf) {
306     PetscInt xm,ym,Mx,My,dof;
307     mctx.ts = ts;
308     mctx.appctx = &appctx;
309     PetscCall(VecDuplicate(x,&mctx.U));
310     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
311       Create matrix free context
312       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
313     PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&dof,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
314     PetscCall(DMDAGetCorners(da,NULL,NULL,NULL,&xm,&ym,NULL));
315     PetscCall(MatCreateShell(PETSC_COMM_WORLD,dof*xm*ym,PETSC_DETERMINE,dof*Mx*My,dof*Mx*My,&mctx,&appctx.A));
316     PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyRHSMatMultTranspose));
317     if (!implicitform) { /* for explicit methods only */
318       PetscCall(TSSetRHSJacobian(ts,appctx.A,appctx.A,RHSJacobianShell,&appctx));
319     } else {
320       /* PetscCall(VecDuplicate(appctx.U,&mctx.Udot)); */
321       PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT,(void (*)(void))MyIMatMult));
322       PetscCall(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyIMatMultTranspose));
323       PetscCall(TSSetIJacobian(ts,appctx.A,appctx.A,IJacobianShell,&appctx));
324     }
325   }
326 
327   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
328      Set initial conditions
329    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
330   PetscCall(InitialConditions(da,x));
331   PetscCall(TSSetSolution(ts,x));
332 
333   /*
334     Have the TS save its trajectory so that TSAdjointSolve() may be used
335   */
336   if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));
337 
338   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
339      Set solver options
340    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
341   PetscCall(TSSetMaxTime(ts,200.0));
342   PetscCall(TSSetTimeStep(ts,0.5));
343   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP));
344   PetscCall(TSSetFromOptions(ts));
345 
346   PetscCall(PetscTime(&v1));
347   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
348      Solve ODE system
349      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
350   PetscCall(TSSolve(ts,x));
351   if (!forwardonly) {
352     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
353        Start the Adjoint model
354        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
355     PetscCall(VecDuplicate(x,&lambda[0]));
356     /*   Reset initial conditions for the adjoint integration */
357     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
358     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
359     PetscLogStagePush(stage);
360     PetscCall(TSAdjointSolve(ts));
361     PetscLogStagePop();
362     PetscCall(VecDestroy(&lambda[0]));
363   }
364   PetscCall(PetscTime(&v2));
365   PetscCall(PetscPrintf(PETSC_COMM_WORLD," %.3lf ",v2-v1));
366 
367   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
368      Free work space.  All PETSc objects should be destroyed when they
369      are no longer needed.
370    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
371   PetscCall(VecDestroy(&x));
372   PetscCall(TSDestroy(&ts));
373   PetscCall(DMDestroy(&da));
374   if (mf) {
375     PetscCall(VecDestroy(&mctx.U));
376     /* PetscCall(VecDestroy(&mctx.Udot));*/
377     PetscCall(MatDestroy(&appctx.A));
378   }
379   PetscCall(PetscFinalize());
380   return 0;
381 }
382 
383 /* ------------------------------------------------------------------- */
384 PetscErrorCode RHSJacobianShell(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
385 {
386   MCtx           *mctx;
387 
388   PetscFunctionBegin;
389   PetscCall(MatShellGetContext(A,&mctx));
390   PetscCall(VecCopy(U,mctx->U));
391   PetscFunctionReturn(0);
392 }
393 
394 PetscErrorCode IJacobianShell(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat BB,void *ctx)
395 {
396   MCtx           *mctx;
397 
398   PetscFunctionBegin;
399   PetscCall(MatShellGetContext(A,&mctx));
400   PetscCall(VecCopy(U,mctx->U));
401   /* PetscCall(VecCopy(Udot,mctx->Udot)); */
402   mctx->shift = a;
403   PetscFunctionReturn(0);
404 }
405 
406 /* ------------------------------------------------------------------- */
407 PetscErrorCode InitialConditions(DM da,Vec U)
408 {
409   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
410   Field          **u;
411   PetscReal      hx,hy,x,y;
412 
413   PetscFunctionBegin;
414   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
415 
416   hx = 2.5/(PetscReal)Mx;
417   hy = 2.5/(PetscReal)My;
418 
419   /*
420      Get pointers to vector data
421   */
422   PetscCall(DMDAVecGetArray(da,U,&u));
423 
424   /*
425      Get local grid boundaries
426   */
427   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
428 
429   /*
430      Compute function over the locally owned part of the grid
431   */
432   for (j=ys; j<ys+ym; j++) {
433     y = j*hy;
434     for (i=xs; i<xs+xm; i++) {
435       x = i*hx;
436       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
437       else u[j][i].v = 0.0;
438 
439       u[j][i].u = 1.0 - 2.0*u[j][i].v;
440     }
441   }
442 
443   /*
444      Restore vectors
445   */
446   PetscCall(DMDAVecRestoreArray(da,U,&u));
447   PetscFunctionReturn(0);
448 }
449 
450 /*TEST
451 
452    build:
453       depends: reaction_diffusion.c
454       requires: !complex !single
455 
456    test:
457       requires: cams
458       args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams
459 TEST*/
460