static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
   See ex5.c for details on the equation.
   This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
   It computes the sensitivity of a component of the final solution, located at the center of the 2D domain, with respect to the initial conditions.
   The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.

   Runtime options:
     -forwardonly  - run the forward simulation without the adjoint
     -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
     -aijpc        - set the matrix used to compute the preconditioner to be AIJ (the Jacobian matrix can be of a different type such as SELL)
*/
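
/*
   Example invocations (assuming the executable is named ex5adj; the TS
   options are standard PETSc options, and a tested combination appears in
   the TEST section at the bottom of this file):

     ./ex5adj -forwardonly -ts_monitor
     ./ex5adj -implicitform -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor
     mpiexec -n 4 ./ex5adj -mf -ts_max_steps 10 -ts_trajectory_type memory
*/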

#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

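/*
   With -mf, the Jacobian is never assembled. A MATSHELL matrix applies the
   (transposed) Jacobian action analytically instead, using the state
   snapshot and shift stored in the MCtx context below. TSAdjointSolve()
   needs only transposed Jacobian-vector products, so this suffices for the
   adjoint run.
*/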
/* Matrix free support */
typedef struct {
  PetscReal time;
  Vec       U;
  Vec       Udot;
  PetscReal shift;
  AppCtx   *appctx;
  TS        ts;
} MCtx;

/*
   User-defined routines
*/
PetscErrorCode InitialConditions(DM, Vec);
PetscErrorCode RHSJacobianShell(TS, PetscReal, Vec, Mat, Mat, void *);
PetscErrorCode IJacobianShell(TS, PetscReal, Vec, Vec, PetscReal, Mat, Mat, void *);

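/*
   Seed the adjoint variable: lambda is the gradient of the cost function
   with respect to the state. Here the cost is the value of the two solution
   components at the grid point closest to (x,y), so the corresponding
   entries of lambda are set to 1 and all others are left at 0.
*/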
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

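  /*
     The forward model (see ex5.c) is
         u_t = D1 (u_xx + u_yy) - u v^2 + gamma (1 - u)
         v_t = D2 (v_xx + v_yy) + u v^2 - (gamma + kappa) v
     so the transposed Jacobian applied to (x.u, x.v) at each grid point is
         y.u = D1 Lap(x.u) - (v^2 + gamma) x.u + v^2 x.v
         y.v = -2 u v x.u + D2 Lap(x.v) + (2 u v - gamma - kappa) x.v
     which is what the loop below computes.
  */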
  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MyIMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

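  /*
     For the implicit form F(t,U,Udot) = Udot - f(t,U) = 0 the IJacobian is
     shift*I - J, where J is the RHS Jacobian, so its transposed action is
     y = shift*x - J^T x: the loop first forms J^T x (the same arithmetic as
     in MyRHSMatMultTranspose) and then applies the shift.
  */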
  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MyIMatMult(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

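  /*
     Untransposed action of the IJacobian, y = shift*x - J x, where the RHS
     Jacobian at each grid point is
         J = [ D1 Lap - v^2 - gamma   -2 u v                          ]
             [ v^2                     D2 Lap + 2 u v - gamma - kappa ]
     applied pointwise in the loop below before the shift term is added.
  */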
  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - (vc * vc + appctx->gamma) * ucb - 2.0 * uc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) + vc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS             ts; /* ODE integrator */
  Vec            x;  /* solution */
  DM             da;
  AppCtx         appctx;
  MCtx           mctx;
  Vec            lambda[1];
  PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE, mf = PETSC_FALSE;
  PetscLogDouble v1, v2;
  PetscLogStage  stage;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-mf", &mf, NULL));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  PetscCall(PetscLogStageRegister("MyAdjoint", &stage));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when the adjoint of backward Euler is used; otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetType(ts, TSRK));
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetType(ts, TSCN));
    PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
    if (appctx.aijpc) {
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
    }
  }

  if (mf) {
    PetscInt xm, ym, Mx, My, dof;

    mctx.ts     = ts;
    mctx.appctx = &appctx;
    PetscCall(VecDuplicate(x, &mctx.U));
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Create matrix-free context
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, &dof, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
    PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &xm, &ym, NULL));
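    /* The shell matrix matches the DMDA layout: each process owns
       dof*xm*ym rows (its local patch) and the global dimension is
       dof*Mx*My; only the multiplication callbacks set below are needed */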
    PetscCall(MatCreateShell(PETSC_COMM_WORLD, dof * xm * ym, PETSC_DETERMINE, dof * Mx * My, dof * Mx * My, &mctx, &appctx.A));
    PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)MyRHSMatMultTranspose));
    if (!implicitform) { /* for explicit methods only */
      PetscCall(TSSetRHSJacobian(ts, appctx.A, appctx.A, RHSJacobianShell, &appctx));
    } else {
      /* PetscCall(VecDuplicate(appctx.U,&mctx.Udot)); */
      PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT, (PetscErrorCodeFn *)MyIMatMult));
      PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)MyIMatMultTranspose));
      PetscCall(TSSetIJacobian(ts, appctx.A, appctx.A, IJacobianShell, &appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
     Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 200.0));
  PetscCall(TSSetTimeStep(ts, 0.5));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  PetscCall(PetscTime(&v1));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /* Reset initial conditions for the adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(PetscLogStagePush(stage));
    PetscCall(TSAdjointSolve(ts));
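    /* On return from TSAdjointSolve(), lambda[0] holds the sensitivity of
       the monitored solution component at the final time with respect to
       the initial conditions; it can be examined with, e.g., VecView() */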
    PetscCall(PetscLogStagePop());
    PetscCall(VecDestroy(&lambda[0]));
  }
  PetscCall(PetscTime(&v2));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, " %.3lf ", v2 - v1));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  if (mf) {
    PetscCall(VecDestroy(&mctx.U));
    /* PetscCall(VecDestroy(&mctx.Udot)); */
    PetscCall(MatDestroy(&appctx.A));
  }
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode RHSJacobianShell(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &mctx));
  PetscCall(VecCopy(U, mctx->U));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode IJacobianShell(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &mctx));
  PetscCall(VecCopy(U, mctx->U));
  /* PetscCall(VecCopy(Udot,mctx->Udot)); */
  mctx->shift = a;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
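/*
   Initial conditions (as in ex5.c): v is a smooth bump confined to the
   square [1, 1.5] x [1, 1.5] of the 2.5 x 2.5 periodic domain, built from
   squared sine waves, and u = 1 - 2v everywhere.
*/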
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
     depends: reaction_diffusion.c
     requires: !complex !single

   test:
     requires: cams
     args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams

TEST*/