xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj_mf.cxx (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 static char help[] = "Demonstrates automatic, matrix-free Jacobian generation using ADOL-C for a time-dependent PDE in 2d, solved using implicit timestepping.\n";
2 
3 /*
4    Concepts: TS^time-dependent nonlinear problems
5    Concepts: Automatic differentiation using ADOL-C
6    Concepts: Matrix-free methods
7 */
8 /*
9    REQUIRES configuration of PETSc with option --download-adolc.
10 
11    For documentation on ADOL-C, see
12      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
13 */
14 /* ------------------------------------------------------------------------
15   See ../advection-diffusion-reaction/ex5 for a description of the problem
16   ------------------------------------------------------------------------- */
17 
18 #include <petscdmda.h>
19 #include <petscts.h>
20 #include "adolc-utils/init.cxx"
21 #include "adolc-utils/matfree.cxx"
22 #include <adolc/adolc.h>
23 
/* (Passive) field for the two variables: plain scalars used in the passive
   residual evaluation and for vector storage */
typedef struct {
  PetscScalar u,v; /* concentrations of the two reacting species */
} Field;

/* Active field for the two variables: ADOL-C active scalars, used only while
   taping the residual in IFunctionActive() */
typedef struct {
  adouble u,v; /* active counterparts of Field.u / Field.v */
} AField;

/* Application context */
typedef struct {
  PetscReal D1,D2,gamma,kappa; /* diffusion coefficients and reaction parameters */
  AField    **u_a,**f_a;       /* active 2d arrays for input/output during taping */
  AdolcCtx  *adctx;            /* Automatic differentiation support */
} AppCtx;
40 
41 extern PetscErrorCode InitialConditions(DM da,Vec U);
42 extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
43 extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
44 extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
45 extern PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx);
46 
47 int main(int argc,char **argv)
48 {
49   TS             ts;                  /* ODE integrator */
50   Vec            x,r;                 /* solution, residual */
51   DM             da;
52   AppCtx         appctx;              /* Application context */
53   AdolcMatCtx    matctx;              /* Matrix (free) context */
54   Vec            lambda[1];
55   PetscBool      forwardonly=PETSC_FALSE;
56   Mat            A;                   /* (Matrix free) Jacobian matrix */
57   PetscInt       gxm,gym;
58 
59   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
60      Initialize program
61      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
62   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
63   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
64   PetscFunctionBeginUser;
65   appctx.D1    = 8.0e-5;
66   appctx.D2    = 4.0e-5;
67   appctx.gamma = .024;
68   appctx.kappa = .06;
69   PetscCall(PetscLogEventRegister("df/dx forward",MAT_CLASSID,&matctx.event1));
70   PetscCall(PetscLogEventRegister("df/d(xdot) forward",MAT_CLASSID,&matctx.event2));
71   PetscCall(PetscLogEventRegister("df/dx reverse",MAT_CLASSID,&matctx.event3));
72   PetscCall(PetscLogEventRegister("df/d(xdot) reverse",MAT_CLASSID,&matctx.event4));
73 
74   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
75      Create distributed array (DMDA) to manage parallel grid and vectors
76   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
77   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
78   PetscCall(DMSetFromOptions(da));
79   PetscCall(DMSetUp(da));
80   PetscCall(DMDASetFieldName(da,0,"u"));
81   PetscCall(DMDASetFieldName(da,1,"v"));
82 
83   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
84      Extract global vectors from DMDA; then duplicate for remaining
85      vectors that are the same types
86    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
87   PetscCall(DMCreateGlobalVector(da,&x));
88   PetscCall(VecDuplicate(x,&r));
89 
90   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
91     Create matrix free context and specify usage of PETSc-ADOL-C drivers
92     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
93   PetscCall(DMSetMatType(da,MATSHELL));
94   PetscCall(DMCreateMatrix(da,&A));
95   PetscCall(MatShellSetContext(A,&matctx));
96   PetscCall(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))PetscAdolcIJacobianVectorProductIDMass));
97   PetscCall(MatShellSetOperation(A,MATOP_MULT_TRANSPOSE,(void (*)(void))PetscAdolcIJacobianTransposeVectorProductIDMass));
98   PetscCall(VecDuplicate(x,&matctx.X));
99   PetscCall(VecDuplicate(x,&matctx.Xdot));
100   PetscCall(DMGetLocalVector(da,&matctx.localX0));
101 
102   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
103      Create timestepping solver context
104      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
105   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
106   PetscCall(TSSetType(ts,TSCN));
107   PetscCall(TSSetDM(ts,da));
108   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
109   PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
110 
111   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
112     Some data required for matrix-free context
113      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
114   PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
115   matctx.m = 2*gxm*gym;matctx.n = 2*gxm*gym; /* Number of dependent and independent variables */
116   matctx.flg = PETSC_FALSE;                  /* Flag for reverse mode */
117   matctx.tag1 = 1;                           /* Tape identifier */
118 
119   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120      Trace function just once
121    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
122   PetscCall(PetscNew(&appctx.adctx));
123   PetscCall(IFunctionActive(ts,1.,x,matctx.Xdot,r,&appctx));
124   PetscCall(PetscFree(appctx.adctx));
125 
126   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
127      Set Jacobian. In this case, IJacobian simply acts to pass context
128      information to the matrix-free Jacobian vector product.
129    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
130   PetscCall(TSSetIJacobian(ts,A,A,IJacobianMatFree,&appctx));
131 
132   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
133      Set initial conditions
134    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
135   PetscCall(InitialConditions(da,x));
136   PetscCall(TSSetSolution(ts,x));
137 
138   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
139     Have the TS save its trajectory so that TSAdjointSolve() may be used
140     and set solver options
141    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
142   if (!forwardonly) {
143     PetscCall(TSSetSaveTrajectory(ts));
144     PetscCall(TSSetMaxTime(ts,200.0));
145     PetscCall(TSSetTimeStep(ts,0.5));
146   } else {
147     PetscCall(TSSetMaxTime(ts,2000.0));
148     PetscCall(TSSetTimeStep(ts,10));
149   }
150   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
151   PetscCall(TSSetFromOptions(ts));
152 
153   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154      Solve ODE system
155      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
156   PetscCall(TSSolve(ts,x));
157   if (!forwardonly) {
158     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
159        Start the Adjoint model
160        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
161     PetscCall(VecDuplicate(x,&lambda[0]));
162     /*   Reset initial conditions for the adjoint integration */
163     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
164     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
165     PetscCall(TSAdjointSolve(ts));
166     PetscCall(VecDestroy(&lambda[0]));
167   }
168 
169   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
170      Free work space.  All PETSc objects should be destroyed when they
171      are no longer needed.
172    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
173   PetscCall(DMRestoreLocalVector(da,&matctx.localX0));
174   PetscCall(VecDestroy(&r));
175   PetscCall(VecDestroy(&matctx.X));
176   PetscCall(VecDestroy(&matctx.Xdot));
177   PetscCall(MatDestroy(&A));
178   PetscCall(VecDestroy(&x));
179   PetscCall(TSDestroy(&ts));
180   PetscCall(DMDestroy(&da));
181 
182   PetscCall(PetscFinalize());
183   return 0;
184 }
185 
186 PetscErrorCode InitialConditions(DM da,Vec U)
187 {
188   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
189   Field          **u;
190   PetscReal      hx,hy,x,y;
191 
192   PetscFunctionBegin;
193   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
194 
195   hx = 2.5/(PetscReal)Mx;
196   hy = 2.5/(PetscReal)My;
197 
198   /*
199      Get pointers to vector data
200   */
201   PetscCall(DMDAVecGetArray(da,U,&u));
202 
203   /*
204      Get local grid boundaries
205   */
206   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
207 
208   /*
209      Compute function over the locally owned part of the grid
210   */
211   for (j=ys; j<ys+ym; j++) {
212     y = j*hy;
213     for (i=xs; i<xs+xm; i++) {
214       x = i*hx;
215       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
216       else u[j][i].v = 0.0;
217 
218       u[j][i].u = 1.0 - 2.0*u[j][i].v;
219     }
220   }
221 
222   /*
223      Restore vectors
224   */
225   PetscCall(DMDAVecRestoreArray(da,U,&u));
226   PetscFunctionReturn(0);
227 }
228 
229 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
230 {
231    PetscInt i,j,Mx,My,xs,ys,xm,ym;
232    Field **l;
233 
234    PetscFunctionBegin;
235    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
236    /* locate the global i index for x and j index for y */
237    i = (PetscInt)(x*(Mx-1));
238    j = (PetscInt)(y*(My-1));
239    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
240 
241    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
242      /* the i,j vertex is on this process */
243      PetscCall(DMDAVecGetArray(da,lambda,&l));
244      l[j][i].u = 1.0;
245      l[j][i].v = 1.0;
246      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
247    }
248    PetscFunctionReturn(0);
249 }
250 
251 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
252 {
253   AppCtx         *appctx = (AppCtx*)ptr;
254   PetscInt       i,j,xs,ys,xm,ym;
255   PetscReal      hx,hy,sx,sy;
256   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
257 
258   PetscFunctionBegin;
259   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
260   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
261 
262   /* Get local grid boundaries */
263   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
264 
265   /* Compute function over the locally owned part of the grid */
266   for (j=ys; j<ys+ym; j++) {
267     for (i=xs; i<xs+xm; i++) {
268       uc        = u[j][i].u;
269       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
270       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
271       vc        = u[j][i].v;
272       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
273       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
274       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
275       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
276     }
277   }
278   PetscCall(PetscLogFlops(16.0*xm*ym));
279   PetscFunctionReturn(0);
280 }
281 
/*
  Active (ADOL-C) version of the residual evaluation: runs the same stencil
  computation as IFunctionLocalPassive, but records it on ADOL-C tape 1 so
  that the matrix-free Jacobian-vector products can replay it. Called once
  from main() before the solve. Writes the residual into F as a side effect.
*/
PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  DMDALocalInfo  info;
  Field          **u,**f,**udot;
  Vec            localU;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      hx,hy,sx,sy;
  adouble        uc,uxx,uyy,vc,vxx,vyy;
  AField         **f_a,*f_c,**u_a,*u_c;
  PetscScalar    dummy;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetLocalVector(da,&localU));
  /* Mesh spacings and inverse-squared factors for the 5-point Laplacian */
  hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  /* Owned (xs..,ys..) and ghosted (gxs..,gys..) extents of the local grid */
  xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,F,&f));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm*info.gym];
  f_c = new AField[info.gxm*info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[info.gym];
  f_a = new AField*[info.gym];

  /* Align indices between array types to endow 2d array with ghost points */
  /* NOTE(review): GiveGhostPoints offsets u_a/f_a so they can be indexed by
     global (ghosted) indices; the offsets are undone below before delete[]. */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  trace_on(1);  /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid */
  /* Same arithmetic as IFunctionLocalPassive, but on active variables
     so each operation is recorded on the tape */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u_a[j][i].u;
      uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc        = u_a[j][i].v;
      vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later. Ghost-point outputs go to `dummy`; owned points
          write their passive value into the output array f.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off();  /* End of active section */
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));

  PetscCall(DMRestoreLocalVector(da,&localU));

  /* Destroy AFields appropriately */
  /* Undo the ghost-index offset applied by GiveGhostPoints so that delete[]
     receives the pointers originally returned by new[] */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(0);
}
403 
404 /*
405   Simply acts to pass TS information to the AdolcMatCtx
406 */
407 PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx)
408 {
409   AdolcMatCtx       *mctx;
410   DM                da;
411 
412   PetscFunctionBeginUser;
413   PetscCall(MatShellGetContext(A_shell,&mctx));
414 
415   mctx->time  = t;
416   mctx->shift = a;
417   if (mctx->ts != ts) mctx->ts = ts;
418   PetscCall(VecCopy(X,mctx->X));
419   PetscCall(VecCopy(Xdot,mctx->Xdot));
420   PetscCall(TSGetDM(ts,&da));
421   PetscCall(DMGlobalToLocalBegin(da,mctx->X,INSERT_VALUES,mctx->localX0));
422   PetscCall(DMGlobalToLocalEnd(da,mctx->X,INSERT_VALUES,mctx->localX0));
423   PetscFunctionReturn(0);
424 }
425 
426 /*TEST
427 
428   build:
429     requires: double !complex adolc
430 
431   test:
432     suffix: 1
433     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
434     output_file: output/adr_ex5adj_mf_1.out
435 
436   test:
437     suffix: 2
438     nsize: 4
439     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
440     output_file: output/adr_ex5adj_mf_2.out
441 
442 TEST*/
443