xref: /petsc/src/ts/tutorials/advection-diffusion-reaction/ex5adj_mf.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
2 
3 /*
4   See ex5.c for details on the equation.
5   This code demonestrates the TSAdjoint interface to a system of time-dependent partial differential equations.
6   It computes the sensitivity of a component in the final solution, which locates in the center of the 2D domain, w.r.t. the initial conditions.
7   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
8 
9   Runtime options:
10     -forwardonly  - run the forward simulation without adjoint
11     -implicitform - provide IFunction and IJacobian to TS, if not set, RHSFunction and RHSJacobian will be used
12     -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13 */
14 
15 #include "reaction_diffusion.h"
16 #include <petscdm.h>
17 #include <petscdmda.h>
18 
/* Matrix-free support: context attached to the MATSHELL Jacobians below */
typedef struct {
  PetscReal time;   /* NOTE(review): never read or written in this file — possibly vestigial */
  Vec       U;      /* copy of the state the Jacobian is linearized at; recorded by RHSJacobianShell/IJacobianShell */
  Vec       Udot;   /* state time-derivative; currently unused (allocation and copy are commented out) */
  PetscReal shift;  /* the "a" passed to IJacobian, so the shell applies a*I - dRHS/dU */
  AppCtx   *appctx; /* model parameters (D1, D2, gamma, kappa) shared with the assembled-Jacobian path */
  TS        ts;     /* time integrator; used only to retrieve the DMDA via TSGetDM() */
} MCtx;
28 
29 /*
30    User-defined routines
31 */
32 PetscErrorCode InitialConditions(DM, Vec);
33 PetscErrorCode RHSJacobianShell(TS, PetscReal, Vec, Mat, Mat, void *);
34 PetscErrorCode IJacobianShell(TS, PetscReal, Vec, Vec, PetscReal, Mat, Mat, void *);
35 
36 PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y) {
37   PetscInt i, j, Mx, My, xs, ys, xm, ym;
38   Field  **l;
39   PetscFunctionBegin;
40 
41   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
42   /* locate the global i index for x and j index for y */
43   i = (PetscInt)(x * (Mx - 1));
44   j = (PetscInt)(y * (My - 1));
45   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
46 
47   if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
48     /* the i,j vertex is on this process */
49     PetscCall(DMDAVecGetArray(da, lambda, &l));
50     l[j][i].u = 1.0;
51     l[j][i].v = 1.0;
52     PetscCall(DMDAVecRestoreArray(da, lambda, &l));
53   }
54   PetscFunctionReturn(0);
55 }
56 
/*
   Shell-matrix action Y = (dRHS/dU)^T X for the Gray-Scott system,
   linearized at the base state saved in mctx->U by RHSJacobianShell.
   TSAdjointSolve applies this transposed Jacobian when the explicit
   (RHSFunction) formulation is used.

   The RHS being differentiated is
     f_u = D1*Lap(u) - u*v^2 + gamma*(1-u)
     f_v = D2*Lap(v) + u*v^2 - (gamma+kappa)*v
   so the transposed 2x2 reaction block couples (x_u, x_v) as coded in
   the loop below; the Laplacian stencil is symmetric and unchanged.
*/
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell, Vec X, Vec Y) {
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* grid spacings on the 2.5 x 2.5 periodic domain and the 1/h^2 stencil weights */
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data: x = ghosted input, u = base state, y = output */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc        = u[j][i].u;
      ucb       = x[j][i].u;
      /* 5-point Laplacian applied to the input (diffusion is self-adjoint) */
      uxx       = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy       = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy       = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      /* transposed reaction block: column of J becomes row of J^T */
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(0);
}
114 
/*
   Shell-matrix action Y = (a*I - dRHS/dU)^T X for the implicit (IFunction)
   formulation F(t,U,Udot) = Udot - RHS(U). The base state U and the shift a
   are recorded in the context by IJacobianShell. TSAdjointSolve applies this
   transpose during the backward integration.

   The body first forms (dRHS/dU)^T X exactly as in MyRHSMatMultTranspose,
   then overwrites it with shift*X - (dRHS/dU)^T X.
*/
static PetscErrorCode MyIMatMultTranspose(Mat A_shell, Vec X, Vec Y) {
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* grid spacings on the 2.5 x 2.5 periodic domain and the 1/h^2 stencil weights */
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data: x = ghosted input, u = base state, y = output */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc        = u[j][i].u;
      ucb       = x[j][i].u;
      /* 5-point Laplacian applied to the input (diffusion is self-adjoint) */
      uxx       = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy       = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc        = u[j][i].v;
      vcb       = x[j][i].v;
      vxx       = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy       = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      /* (dRHS/dU)^T X, identical to the explicit-form transpose ... */
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      /* ... then combine with the shifted identity: Y = a*X - (dRHS/dU)^T X */
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(0);
}
174 
175 static PetscErrorCode MyIMatMult(Mat A_shell, Vec X, Vec Y) {
176   MCtx       *mctx;
177   AppCtx     *appctx;
178   DM          da;
179   PetscInt    i, j, Mx, My, xs, ys, xm, ym;
180   PetscReal   hx, hy, sx, sy;
181   PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
182   Field     **u, **x, **y;
183   Vec         localX;
184 
185   PetscFunctionBeginUser;
186   PetscCall(MatShellGetContext(A_shell, &mctx));
187   appctx = mctx->appctx;
188   PetscCall(TSGetDM(mctx->ts, &da));
189   PetscCall(DMGetLocalVector(da, &localX));
190   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
191   hx = 2.50 / (PetscReal)Mx;
192   sx = 1.0 / (hx * hx);
193   hy = 2.50 / (PetscReal)My;
194   sy = 1.0 / (hy * hy);
195 
196   /* Scatter ghost points to local vector,using the 2-step process
197      DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
198      By placing code between these two statements, computations can be
199      done while messages are in transition. */
200   PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
201   PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
202 
203   /* Get pointers to vector data */
204   PetscCall(DMDAVecGetArrayRead(da, localX, &x));
205   PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
206   PetscCall(DMDAVecGetArray(da, Y, &y));
207 
208   /* Get local grid boundaries */
209   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
210 
211   /* Compute function over the locally owned part of the grid */
212   for (j = ys; j < ys + ym; j++) {
213     for (i = xs; i < xs + xm; i++) {
214       uc        = u[j][i].u;
215       ucb       = x[j][i].u;
216       uxx       = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
217       uyy       = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
218       vc        = u[j][i].v;
219       vcb       = x[j][i].v;
220       vxx       = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
221       vyy       = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
222       y[j][i].u = appctx->D1 * (uxx + uyy) - (vc * vc + appctx->gamma) * ucb - 2.0 * uc * vc * vcb;
223       y[j][i].v = appctx->D2 * (vxx + vyy) + vc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
224       y[j][i].u = mctx->shift * ucb - y[j][i].u;
225       y[j][i].v = mctx->shift * vcb - y[j][i].v;
226     }
227   }
228   PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
229   PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
230   PetscCall(DMDAVecRestoreArray(da, Y, &y));
231   PetscCall(DMRestoreLocalVector(da, &localX));
232   PetscFunctionReturn(0);
233 }
234 
235 int main(int argc, char **argv) {
236   TS             ts; /* ODE integrator */
237   Vec            x;  /* solution */
238   DM             da;
239   AppCtx         appctx;
240   MCtx           mctx;
241   Vec            lambda[1];
242   PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE, mf = PETSC_FALSE;
243   PetscLogDouble v1, v2;
244 #if defined(PETSC_USE_LOG)
245   PetscLogStage stage;
246 #endif
247 
248   PetscFunctionBeginUser;
249   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
250   PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
251   PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
252   appctx.aijpc = PETSC_FALSE;
253   PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
254   PetscCall(PetscOptionsGetBool(NULL, NULL, "-mf", &mf, NULL));
255 
256   appctx.D1    = 8.0e-5;
257   appctx.D2    = 4.0e-5;
258   appctx.gamma = .024;
259   appctx.kappa = .06;
260 
261   PetscLogStageRegister("MyAdjoint", &stage);
262   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
263      Create distributed array (DMDA) to manage parallel grid and vectors
264   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
265   PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
266   PetscCall(DMSetFromOptions(da));
267   PetscCall(DMSetUp(da));
268   PetscCall(DMDASetFieldName(da, 0, "u"));
269   PetscCall(DMDASetFieldName(da, 1, "v"));
270 
271   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
272      Extract global vectors from DMDA; then duplicate for remaining
273      vectors that are the same types
274    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
275   PetscCall(DMCreateGlobalVector(da, &x));
276 
277   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
278      Create timestepping solver context
279      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
280   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
281   PetscCall(TSSetDM(ts, da));
282   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
283   PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* less Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
284   if (!implicitform) {
285     PetscCall(TSSetType(ts, TSRK));
286     PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
287     PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
288   } else {
289     PetscCall(TSSetType(ts, TSCN));
290     PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
291     if (appctx.aijpc) {
292       Mat A, B;
293 
294       PetscCall(DMSetMatType(da, MATSELL));
295       PetscCall(DMCreateMatrix(da, &A));
296       PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
297       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
298       PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
299       PetscCall(MatDestroy(&A));
300       PetscCall(MatDestroy(&B));
301     } else {
302       PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
303     }
304   }
305 
306   if (mf) {
307     PetscInt xm, ym, Mx, My, dof;
308     mctx.ts     = ts;
309     mctx.appctx = &appctx;
310     PetscCall(VecDuplicate(x, &mctx.U));
311     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
312       Create matrix free context
313       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
314     PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, &dof, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
315     PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &xm, &ym, NULL));
316     PetscCall(MatCreateShell(PETSC_COMM_WORLD, dof * xm * ym, PETSC_DETERMINE, dof * Mx * My, dof * Mx * My, &mctx, &appctx.A));
317     PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyRHSMatMultTranspose));
318     if (!implicitform) { /* for explicit methods only */
319       PetscCall(TSSetRHSJacobian(ts, appctx.A, appctx.A, RHSJacobianShell, &appctx));
320     } else {
321       /* PetscCall(VecDuplicate(appctx.U,&mctx.Udot)); */
322       PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT, (void (*)(void))MyIMatMult));
323       PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyIMatMultTranspose));
324       PetscCall(TSSetIJacobian(ts, appctx.A, appctx.A, IJacobianShell, &appctx));
325     }
326   }
327 
328   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
329      Set initial conditions
330    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
331   PetscCall(InitialConditions(da, x));
332   PetscCall(TSSetSolution(ts, x));
333 
334   /*
335     Have the TS save its trajectory so that TSAdjointSolve() may be used
336   */
337   if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));
338 
339   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
340      Set solver options
341    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
342   PetscCall(TSSetMaxTime(ts, 200.0));
343   PetscCall(TSSetTimeStep(ts, 0.5));
344   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
345   PetscCall(TSSetFromOptions(ts));
346 
347   PetscCall(PetscTime(&v1));
348   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
349      Solve ODE system
350      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
351   PetscCall(TSSolve(ts, x));
352   if (!forwardonly) {
353     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
354        Start the Adjoint model
355        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
356     PetscCall(VecDuplicate(x, &lambda[0]));
357     /*   Reset initial conditions for the adjoint integration */
358     PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
359     PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
360     PetscLogStagePush(stage);
361     PetscCall(TSAdjointSolve(ts));
362     PetscLogStagePop();
363     PetscCall(VecDestroy(&lambda[0]));
364   }
365   PetscCall(PetscTime(&v2));
366   PetscCall(PetscPrintf(PETSC_COMM_WORLD, " %.3lf ", v2 - v1));
367 
368   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
369      Free work space.  All PETSc objects should be destroyed when they
370      are no longer needed.
371    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
372   PetscCall(VecDestroy(&x));
373   PetscCall(TSDestroy(&ts));
374   PetscCall(DMDestroy(&da));
375   if (mf) {
376     PetscCall(VecDestroy(&mctx.U));
377     /* PetscCall(VecDestroy(&mctx.Udot));*/
378     PetscCall(MatDestroy(&appctx.A));
379   }
380   PetscCall(PetscFinalize());
381   return 0;
382 }
383 
384 /* ------------------------------------------------------------------- */
385 PetscErrorCode RHSJacobianShell(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx) {
386   MCtx *mctx;
387 
388   PetscFunctionBegin;
389   PetscCall(MatShellGetContext(A, &mctx));
390   PetscCall(VecCopy(U, mctx->U));
391   PetscFunctionReturn(0);
392 }
393 
394 PetscErrorCode IJacobianShell(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat BB, void *ctx) {
395   MCtx *mctx;
396 
397   PetscFunctionBegin;
398   PetscCall(MatShellGetContext(A, &mctx));
399   PetscCall(VecCopy(U, mctx->U));
400   /* PetscCall(VecCopy(Udot,mctx->Udot)); */
401   mctx->shift = a;
402   PetscFunctionReturn(0);
403 }
404 
405 /* ------------------------------------------------------------------- */
406 PetscErrorCode InitialConditions(DM da, Vec U) {
407   PetscInt  i, j, xs, ys, xm, ym, Mx, My;
408   Field   **u;
409   PetscReal hx, hy, x, y;
410 
411   PetscFunctionBegin;
412   PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
413 
414   hx = 2.5 / (PetscReal)Mx;
415   hy = 2.5 / (PetscReal)My;
416 
417   /*
418      Get pointers to vector data
419   */
420   PetscCall(DMDAVecGetArray(da, U, &u));
421 
422   /*
423      Get local grid boundaries
424   */
425   PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
426 
427   /*
428      Compute function over the locally owned part of the grid
429   */
430   for (j = ys; j < ys + ym; j++) {
431     y = j * hy;
432     for (i = xs; i < xs + xm; i++) {
433       x = i * hx;
434       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
435       else u[j][i].v = 0.0;
436 
437       u[j][i].u = 1.0 - 2.0 * u[j][i].v;
438     }
439   }
440 
441   /*
442      Restore vectors
443   */
444   PetscCall(DMDAVecRestoreArray(da, U, &u));
445   PetscFunctionReturn(0);
446 }
447 
448 /*TEST
449 
450    build:
451       depends: reaction_diffusion.c
452       requires: !complex !single
453 
454    test:
455       requires: cams
456       args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams
457 TEST*/
458