xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj_mf.cxx (revision daa037dfd3c3bec8dc8659548d2b20b07c1dc6de)
1 static char help[] = "Demonstrates automatic, matrix-free Jacobian generation using ADOL-C for a time-dependent PDE in 2d, solved using implicit timestepping.\n";
2 
3 /*
4    REQUIRES configuration of PETSc with option --download-adolc.
5 
6    For documentation on ADOL-C, see
7      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
8 */
9 /* ------------------------------------------------------------------------
10   See ../advection-diffusion-reaction/ex5 for a description of the problem
11   ------------------------------------------------------------------------- */
12 
13 #include <petscdmda.h>
14 #include <petscts.h>
15 #include "adolc-utils/init.cxx"
16 #include "adolc-utils/matfree.cxx"
17 #include <adolc/adolc.h>
18 
/* (Passive) field for the two variables: plain PetscScalar values used in ordinary
   (non-taped) residual evaluation */
typedef struct {
  PetscScalar u,v;
} Field;

/* Active field for the two variables: ADOL-C adouble entries, whose operations are
   recorded on the tape during IFunctionActive() */
typedef struct {
  adouble u,v;
} AField;

/* Application context */
typedef struct {
  PetscReal D1,D2,gamma,kappa; /* diffusion and reaction coefficients of the PDE */
  AField    **u_a,**f_a;       /* active (taped) work arrays for state and residual */
  AdolcCtx  *adctx; /* Automatic differentiation support */
} AppCtx;
35 
36 extern PetscErrorCode InitialConditions(DM da,Vec U);
37 extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
38 extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
39 extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
40 extern PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx);
41 
42 int main(int argc,char **argv)
43 {
44   TS             ts;                  /* ODE integrator */
45   Vec            x,r;                 /* solution, residual */
46   DM             da;
47   AppCtx         appctx;              /* Application context */
48   AdolcMatCtx    matctx;              /* Matrix (free) context */
49   Vec            lambda[1];
50   PetscBool      forwardonly=PETSC_FALSE;
51   Mat            A;                   /* (Matrix free) Jacobian matrix */
52   PetscInt       gxm,gym;
53 
54   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
55      Initialize program
56      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
57   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
58   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
59   PetscFunctionBeginUser;
60   appctx.D1    = 8.0e-5;
61   appctx.D2    = 4.0e-5;
62   appctx.gamma = .024;
63   appctx.kappa = .06;
64   PetscCall(PetscLogEventRegister("df/dx forward",MAT_CLASSID,&matctx.event1));
65   PetscCall(PetscLogEventRegister("df/d(xdot) forward",MAT_CLASSID,&matctx.event2));
66   PetscCall(PetscLogEventRegister("df/dx reverse",MAT_CLASSID,&matctx.event3));
67   PetscCall(PetscLogEventRegister("df/d(xdot) reverse",MAT_CLASSID,&matctx.event4));
68 
69   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
70      Create distributed array (DMDA) to manage parallel grid and vectors
71   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
72   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
73   PetscCall(DMSetFromOptions(da));
74   PetscCall(DMSetUp(da));
75   PetscCall(DMDASetFieldName(da,0,"u"));
76   PetscCall(DMDASetFieldName(da,1,"v"));
77 
78   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
79      Extract global vectors from DMDA; then duplicate for remaining
80      vectors that are the same types
81    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
82   PetscCall(DMCreateGlobalVector(da,&x));
83   PetscCall(VecDuplicate(x,&r));
84 
85   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
86     Create matrix free context and specify usage of PETSc-ADOL-C drivers
87     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
88   PetscCall(DMSetMatType(da,MATSHELL));
89   PetscCall(DMCreateMatrix(da,&A));
90   PetscCall(MatShellSetContext(A,&matctx));
91   PetscCall(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))PetscAdolcIJacobianVectorProductIDMass));
92   PetscCall(MatShellSetOperation(A,MATOP_MULT_TRANSPOSE,(void (*)(void))PetscAdolcIJacobianTransposeVectorProductIDMass));
93   PetscCall(VecDuplicate(x,&matctx.X));
94   PetscCall(VecDuplicate(x,&matctx.Xdot));
95   PetscCall(DMGetLocalVector(da,&matctx.localX0));
96 
97   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
98      Create timestepping solver context
99      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
100   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
101   PetscCall(TSSetType(ts,TSCN));
102   PetscCall(TSSetDM(ts,da));
103   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
104   PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
105 
106   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
107     Some data required for matrix-free context
108      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
109   PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
110   matctx.m = 2*gxm*gym;matctx.n = 2*gxm*gym; /* Number of dependent and independent variables */
111   matctx.flg = PETSC_FALSE;                  /* Flag for reverse mode */
112   matctx.tag1 = 1;                           /* Tape identifier */
113 
114   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115      Trace function just once
116    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117   PetscCall(PetscNew(&appctx.adctx));
118   PetscCall(IFunctionActive(ts,1.,x,matctx.Xdot,r,&appctx));
119   PetscCall(PetscFree(appctx.adctx));
120 
121   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
122      Set Jacobian. In this case, IJacobian simply acts to pass context
123      information to the matrix-free Jacobian vector product.
124    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
125   PetscCall(TSSetIJacobian(ts,A,A,IJacobianMatFree,&appctx));
126 
127   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
128      Set initial conditions
129    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
130   PetscCall(InitialConditions(da,x));
131   PetscCall(TSSetSolution(ts,x));
132 
133   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134     Have the TS save its trajectory so that TSAdjointSolve() may be used
135     and set solver options
136    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
137   if (!forwardonly) {
138     PetscCall(TSSetSaveTrajectory(ts));
139     PetscCall(TSSetMaxTime(ts,200.0));
140     PetscCall(TSSetTimeStep(ts,0.5));
141   } else {
142     PetscCall(TSSetMaxTime(ts,2000.0));
143     PetscCall(TSSetTimeStep(ts,10));
144   }
145   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
146   PetscCall(TSSetFromOptions(ts));
147 
148   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149      Solve ODE system
150      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
151   PetscCall(TSSolve(ts,x));
152   if (!forwardonly) {
153     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154        Start the Adjoint model
155        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
156     PetscCall(VecDuplicate(x,&lambda[0]));
157     /*   Reset initial conditions for the adjoint integration */
158     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
159     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
160     PetscCall(TSAdjointSolve(ts));
161     PetscCall(VecDestroy(&lambda[0]));
162   }
163 
164   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
165      Free work space.  All PETSc objects should be destroyed when they
166      are no longer needed.
167    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
168   PetscCall(DMRestoreLocalVector(da,&matctx.localX0));
169   PetscCall(VecDestroy(&r));
170   PetscCall(VecDestroy(&matctx.X));
171   PetscCall(VecDestroy(&matctx.Xdot));
172   PetscCall(MatDestroy(&A));
173   PetscCall(VecDestroy(&x));
174   PetscCall(TSDestroy(&ts));
175   PetscCall(DMDestroy(&da));
176 
177   PetscCall(PetscFinalize());
178   return 0;
179 }
180 
181 PetscErrorCode InitialConditions(DM da,Vec U)
182 {
183   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
184   Field          **u;
185   PetscReal      hx,hy,x,y;
186 
187   PetscFunctionBegin;
188   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
189 
190   hx = 2.5/(PetscReal)Mx;
191   hy = 2.5/(PetscReal)My;
192 
193   /*
194      Get pointers to vector data
195   */
196   PetscCall(DMDAVecGetArray(da,U,&u));
197 
198   /*
199      Get local grid boundaries
200   */
201   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
202 
203   /*
204      Compute function over the locally owned part of the grid
205   */
206   for (j=ys; j<ys+ym; j++) {
207     y = j*hy;
208     for (i=xs; i<xs+xm; i++) {
209       x = i*hx;
210       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
211       else u[j][i].v = 0.0;
212 
213       u[j][i].u = 1.0 - 2.0*u[j][i].v;
214     }
215   }
216 
217   /*
218      Restore vectors
219   */
220   PetscCall(DMDAVecRestoreArray(da,U,&u));
221   PetscFunctionReturn(0);
222 }
223 
224 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
225 {
226    PetscInt i,j,Mx,My,xs,ys,xm,ym;
227    Field **l;
228 
229    PetscFunctionBegin;
230    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
231    /* locate the global i index for x and j index for y */
232    i = (PetscInt)(x*(Mx-1));
233    j = (PetscInt)(y*(My-1));
234    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
235 
236    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
237      /* the i,j vertex is on this process */
238      PetscCall(DMDAVecGetArray(da,lambda,&l));
239      l[j][i].u = 1.0;
240      l[j][i].v = 1.0;
241      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
242    }
243    PetscFunctionReturn(0);
244 }
245 
246 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
247 {
248   AppCtx         *appctx = (AppCtx*)ptr;
249   PetscInt       i,j,xs,ys,xm,ym;
250   PetscReal      hx,hy,sx,sy;
251   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
252 
253   PetscFunctionBegin;
254   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
255   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
256 
257   /* Get local grid boundaries */
258   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
259 
260   /* Compute function over the locally owned part of the grid */
261   for (j=ys; j<ys+ym; j++) {
262     for (i=xs; i<xs+xm; i++) {
263       uc        = u[j][i].u;
264       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
265       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
266       vc        = u[j][i].v;
267       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
268       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
269       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
270       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
271     }
272   }
273   PetscCall(PetscLogFlops(16.0*xm*ym));
274   PetscFunctionReturn(0);
275 }
276 
/*
  Active (taped) version of the implicit residual: evaluates the same Gray-Scott
  residual as IFunctionLocalPassive, but through ADOL-C adoubles between
  trace_on(1)/trace_off(), recording tape 1 for later matrix-free
  Jacobian-vector products. Called exactly once from main() to record the tape.
*/
PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  DMDALocalInfo  info;
  Field          **u,**f,**udot;
  Vec            localU;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      hx,hy,sx,sy;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  AField         **f_a,*f_c,**u_a,*u_c;
  PetscScalar    dummy;    /* sink for dependents at ghost points (see below) */

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetLocalVector(da,&localU));
  /* Mesh spacings and 1/h^2 stencil scalings on the 2.5 x 2.5 domain */
  hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  /* Owned (xs/ys/xm/ym) and ghosted (gxs/gys/gxm/gym) local extents */
  xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,F,&f));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm*info.gym];
  f_c = new AField[info.gxm*info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[info.gym];
  f_a = new AField*[info.gym];

  /* Align indices between array types to endow 2d array with ghost points
     (row pointers are offset so u_a/f_a can be indexed by global i,j) */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  trace_on(1);  /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid
     (same stencil and reaction terms as IFunctionLocalPassive, but in adoubles) */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u_a[j][i].u;
      uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc        = u_a[j][i].v;
      vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later. Ghost points are extracted into `dummy`; owned points into F.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off();  /* End of active section */
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));

  PetscCall(DMRestoreLocalVector(da,&localU));

  /* Destroy AFields appropriately: undo the ghost-point row offset applied by
     GiveGhostPoints() before handing the pointers back to delete[] */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(0);
}
398 
399 /*
400   Simply acts to pass TS information to the AdolcMatCtx
401 */
402 PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx)
403 {
404   AdolcMatCtx       *mctx;
405   DM                da;
406 
407   PetscFunctionBeginUser;
408   PetscCall(MatShellGetContext(A_shell,&mctx));
409 
410   mctx->time  = t;
411   mctx->shift = a;
412   if (mctx->ts != ts) mctx->ts = ts;
413   PetscCall(VecCopy(X,mctx->X));
414   PetscCall(VecCopy(Xdot,mctx->Xdot));
415   PetscCall(TSGetDM(ts,&da));
416   PetscCall(DMGlobalToLocalBegin(da,mctx->X,INSERT_VALUES,mctx->localX0));
417   PetscCall(DMGlobalToLocalEnd(da,mctx->X,INSERT_VALUES,mctx->localX0));
418   PetscFunctionReturn(0);
419 }
420 
421 /*TEST
422 
423   build:
424     requires: double !complex adolc
425 
426   test:
427     suffix: 1
428     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
429     output_file: output/adr_ex5adj_mf_1.out
430 
431   test:
432     suffix: 2
433     nsize: 4
434     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
435     output_file: output/adr_ex5adj_mf_2.out
436 
437 TEST*/
438