xref: /petsc/src/tao/unconstrained/tutorials/spectraladjointassimilation.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 
2 static char help[] ="Solves a simple data assimilation problem with one dimensional advection diffusion equation using TSAdjoint\n\n";
3 
4 /*
5 
6     Not yet tested in parallel
7 
8 */
9 /*
10    Concepts: TS^time-dependent linear problems
11    Concepts: TS^heat equation
12    Concepts: TS^diffusion equation
13    Concepts: adjoints
14    Processors: n
15 */
16 
17 /* ------------------------------------------------------------------------
18 
   This program uses the one-dimensional advection-diffusion equation,
20        u_t = mu*u_xx - a u_x,
21    on the domain 0 <= x <= 1, with periodic boundary conditions
22 
23    to demonstrate solving a data assimilation problem of finding the initial conditions
24    to produce a given solution at a fixed time.
25 
26    The operators are discretized with the spectral element method
27 
28   ------------------------------------------------------------------------- */
29 
30 /*
31    Include "petscts.h" so that we can use TS solvers.  Note that this file
32    automatically includes:
33      petscsys.h       - base PETSc routines   petscvec.h  - vectors
34      petscmat.h  - matrices
35      petscis.h     - index sets            petscksp.h  - Krylov subspace methods
36      petscviewer.h - viewers               petscpc.h   - preconditioners
37      petscksp.h   - linear solvers        petscsnes.h - nonlinear solvers
38 */
39 
40 #include <petsctao.h>
41 #include <petscts.h>
42 #include <petscdt.h>
43 #include <petscdraw.h>
44 #include <petscdmda.h>
45 
46 /*
47    User-defined application context - contains data needed by the
48    application-provided call-back routines.
49 */
50 
/* Gauss-Lobatto-Legendre (GLL) quadrature data used for the spectral element basis */
typedef struct {
  PetscInt  n;                /* number of nodes */
  PetscReal *nodes;           /* GLL nodes */
  PetscReal *weights;         /* GLL weights */
} PetscGLL;
56 
/* Physical and discretization parameters for the advection-diffusion problem */
typedef struct {
  PetscInt    N;             /* grid points per elements*/
  PetscInt    E;              /* number of elements */
  PetscReal   tol_L2,tol_max; /* error norms */
  PetscInt    steps;          /* number of timesteps */
  PetscReal   Tend;           /* endtime */
  PetscReal   mu;             /* viscosity */
  PetscReal   a;              /* advection speed */
  PetscReal   L;              /* total length of domain */
  PetscReal   Le;             /* length of one element (L/E), set in main() */
  PetscReal   Tadj;           /* NOTE(review): not referenced anywhere in this file — confirm it is still needed */
} PetscParam;
69 
/* Problem data: solution vectors shared between the TS solves and the Tao optimization */
typedef struct {
  Vec         reference;               /* desired end state */
  Vec         grid;              /* total grid */
  Vec         grad;              /* NOTE(review): declared but never created, used, or destroyed in this file — confirm */
  Vec         ic;                /* initial conditions; the Tao optimization variable */
  Vec         curr_sol;          /* working vector integrated forward by TSSolve() */
  Vec         joe;               /* copy of the final-time solution from the last forward solve */
  Vec         true_solution;     /* actual initial conditions for the final solution */
} PetscData;
79 
/* Spectral element method (SEM) operators and quadrature data */
typedef struct {
  Vec         grid;              /* total grid */
  Vec         mass;              /* mass matrix for total integration */
  Mat         stiff;             /* stiffness matrix */
  Mat         advec;             /* advection operator */
  Mat         keptstiff;         /* saved copy of stiff - advec (the full RHS operator) */
  PetscGLL    gll;               /* GLL nodes and weights for one element */
} PetscSEMOperators;
88 
/* User-defined application context passed to all callbacks */
typedef struct {
  DM                da;                /* distributed array data structure */
  PetscSEMOperators SEMop;             /* SEM operators and quadrature */
  PetscParam        param;             /* physical/discretization parameters */
  PetscData         dat;               /* solution vectors */
  TS                ts;                /* timestepper reused for every forward/adjoint solve */
  PetscReal         initial_dt;        /* initial timestep, restored before each TSSolve() */
  PetscReal         *solutioncoefficients; /* random coefficients of the analytic solution modes */
  PetscInt          ncoeff;            /* number of sine modes in the analytic solution */
} AppCtx;
99 
100 /*
101    User-defined routines
102 */
103 extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
104 extern PetscErrorCode RHSLaplacian(TS,PetscReal,Vec,Mat,Mat,void*);
105 extern PetscErrorCode RHSAdvection(TS,PetscReal,Vec,Mat,Mat,void*);
106 extern PetscErrorCode InitialConditions(Vec,AppCtx*);
107 extern PetscErrorCode ComputeReference(TS,PetscReal,Vec,AppCtx*);
108 extern PetscErrorCode MonitorError(Tao,void*);
109 extern PetscErrorCode MonitorDestroy(void**);
110 extern PetscErrorCode ComputeSolutionCoefficients(AppCtx*);
111 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
112 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
113 
114 int main(int argc,char **argv)
115 {
116   AppCtx         appctx;                 /* user-defined application context */
117   Tao            tao;
118   Vec            u;                      /* approximate solution vector */
119   PetscInt       i, xs, xm, ind, j, lenglob;
120   PetscReal      x, *wrk_ptr1, *wrk_ptr2;
121   MatNullSpace   nsp;
122 
123    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
124      Initialize program and set problem parameters
125      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
126   PetscFunctionBegin;
127 
128   PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
129 
130   /*initialize parameters */
131   appctx.param.N    = 10;  /* order of the spectral element */
132   appctx.param.E    = 8;  /* number of elements */
133   appctx.param.L    = 1.0;  /* length of the domain */
134   appctx.param.mu   = 0.00001; /* diffusion coefficient */
135   appctx.param.a    = 0.0;     /* advection speed */
136   appctx.initial_dt = 1e-4;
137   appctx.param.steps = PETSC_MAX_INT;
138   appctx.param.Tend  = 0.01;
139   appctx.ncoeff      = 2;
140 
141   PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL));
142   PetscCall(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL));
143   PetscCall(PetscOptionsGetInt(NULL,NULL,"-ncoeff",&appctx.ncoeff,NULL));
144   PetscCall(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL));
145   PetscCall(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL));
146   PetscCall(PetscOptionsGetReal(NULL,NULL,"-a",&appctx.param.a,NULL));
147   appctx.param.Le = appctx.param.L/appctx.param.E;
148 
149   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
150      Create GLL data structures
151      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
152   PetscCall(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights));
153   PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
154   appctx.SEMop.gll.n = appctx.param.N;
155   lenglob  = appctx.param.E*(appctx.param.N-1);
156 
157   /*
158      Create distributed array (DMDA) to manage parallel grid and vectors
159      and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
160      total grid values spread equally among all the processors, except first and last
161   */
162 
163   PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da));
164   PetscCall(DMSetFromOptions(appctx.da));
165   PetscCall(DMSetUp(appctx.da));
166 
167   /*
168      Extract global and local vectors from DMDA; we use these to store the
169      approximate solution.  Then duplicate these for remaining vectors that
170      have the same types.
171   */
172 
173   PetscCall(DMCreateGlobalVector(appctx.da,&u));
174   PetscCall(VecDuplicate(u,&appctx.dat.ic));
175   PetscCall(VecDuplicate(u,&appctx.dat.true_solution));
176   PetscCall(VecDuplicate(u,&appctx.dat.reference));
177   PetscCall(VecDuplicate(u,&appctx.SEMop.grid));
178   PetscCall(VecDuplicate(u,&appctx.SEMop.mass));
179   PetscCall(VecDuplicate(u,&appctx.dat.curr_sol));
180   PetscCall(VecDuplicate(u,&appctx.dat.joe));
181 
182   PetscCall(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL));
183   PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
184   PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));
185 
186   /* Compute function over the locally owned part of the grid */
187 
188     xs=xs/(appctx.param.N-1);
189     xm=xm/(appctx.param.N-1);
190 
191   /*
192      Build total grid and mass over entire mesh (multi-elemental)
193   */
194 
195   for (i=xs; i<xs+xm; i++) {
196     for (j=0; j<appctx.param.N-1; j++) {
197       x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
198       ind=i*(appctx.param.N-1)+j;
199       wrk_ptr1[ind]=x;
200       wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
201       if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
202     }
203   }
204   PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1));
205   PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2));
206 
207   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208    Create matrix data structure; set matrix evaluation routine.
209    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
210   PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
211   PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff));
212   PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.advec));
213 
214   /*
215    For linear problems with a time-dependent f(u,t) in the equation
216    u_t = f(u,t), the user provides the discretized right-hand-side
217    as a time-dependent matrix.
218    */
219   PetscCall(RHSLaplacian(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx));
220   PetscCall(RHSAdvection(appctx.ts,0.0,u,appctx.SEMop.advec,appctx.SEMop.advec,&appctx));
221   PetscCall(MatAXPY(appctx.SEMop.stiff,-1.0,appctx.SEMop.advec,DIFFERENT_NONZERO_PATTERN));
222   PetscCall(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff));
223 
224   /* attach the null space to the matrix, this probably is not needed but does no harm */
225   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp));
226   PetscCall(MatSetNullSpace(appctx.SEMop.stiff,nsp));
227   PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL));
228   PetscCall(MatNullSpaceDestroy(&nsp));
229 
230   /* Create the TS solver that solves the ODE and its adjoint; set its options */
231   PetscCall(TSCreate(PETSC_COMM_WORLD,&appctx.ts));
232   PetscCall(TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec, void *))ComputeReference,&appctx));
233   PetscCall(TSSetProblemType(appctx.ts,TS_LINEAR));
234   PetscCall(TSSetType(appctx.ts,TSRK));
235   PetscCall(TSSetDM(appctx.ts,appctx.da));
236   PetscCall(TSSetTime(appctx.ts,0.0));
237   PetscCall(TSSetTimeStep(appctx.ts,appctx.initial_dt));
238   PetscCall(TSSetMaxSteps(appctx.ts,appctx.param.steps));
239   PetscCall(TSSetMaxTime(appctx.ts,appctx.param.Tend));
240   PetscCall(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP));
241   PetscCall(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL));
242   PetscCall(TSSetFromOptions(appctx.ts));
243   /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */
244   PetscCall(TSGetTimeStep(appctx.ts,&appctx.initial_dt));
245   PetscCall(TSSetRHSFunction(appctx.ts,NULL,TSComputeRHSFunctionLinear,&appctx));
246   PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,TSComputeRHSJacobianConstant,&appctx));
247   /*  PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
248       PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); */
249 
250   /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
251   PetscCall(ComputeSolutionCoefficients(&appctx));
252   PetscCall(InitialConditions(appctx.dat.ic,&appctx));
253   PetscCall(ComputeReference(appctx.ts,appctx.param.Tend,appctx.dat.reference,&appctx));
254   PetscCall(ComputeReference(appctx.ts,0.0,appctx.dat.true_solution,&appctx));
255 
256   /* Set up to save trajectory before TSSetFromOptions() so that TSTrajectory options can be captured */
257   PetscCall(TSSetSaveTrajectory(appctx.ts));
258   PetscCall(TSSetFromOptions(appctx.ts));
259 
260   /* Create TAO solver and set desired solution method  */
261   PetscCall(TaoCreate(PETSC_COMM_WORLD,&tao));
262   PetscCall(TaoSetMonitor(tao,MonitorError,&appctx,MonitorDestroy));
263   PetscCall(TaoSetType(tao,TAOBQNLS));
264   PetscCall(TaoSetSolution(tao,appctx.dat.ic));
265   /* Set routine for function and gradient evaluation  */
266   PetscCall(TaoSetObjectiveAndGradient(tao,NULL,FormFunctionGradient,(void *)&appctx));
267   /* Check for any TAO command line options  */
268   PetscCall(TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT));
269   PetscCall(TaoSetFromOptions(tao));
270   PetscCall(TaoSolve(tao));
271 
272   PetscCall(TaoDestroy(&tao));
273   PetscCall(PetscFree(appctx.solutioncoefficients));
274   PetscCall(MatDestroy(&appctx.SEMop.advec));
275   PetscCall(MatDestroy(&appctx.SEMop.stiff));
276   PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
277   PetscCall(VecDestroy(&u));
278   PetscCall(VecDestroy(&appctx.dat.ic));
279   PetscCall(VecDestroy(&appctx.dat.joe));
280   PetscCall(VecDestroy(&appctx.dat.true_solution));
281   PetscCall(VecDestroy(&appctx.dat.reference));
282   PetscCall(VecDestroy(&appctx.SEMop.grid));
283   PetscCall(VecDestroy(&appctx.SEMop.mass));
284   PetscCall(VecDestroy(&appctx.dat.curr_sol));
285   PetscCall(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights));
286   PetscCall(DMDestroy(&appctx.da));
287   PetscCall(TSDestroy(&appctx.ts));
288 
289   /*
290      Always call PetscFinalize() before exiting a program.  This routine
291        - finalizes the PETSc libraries as well as MPI
292        - provides summary and diagnostic information if certain runtime
293          options are chosen (e.g., -log_summary).
294   */
295     PetscCall(PetscFinalize());
296     return 0;
297 }
298 
299 /*
300     Computes the coefficients for the analytic solution to the PDE
301 */
302 PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
303 {
304   PetscRandom       rand;
305   PetscInt          i;
306 
307   PetscFunctionBegin;
308   PetscCall(PetscMalloc1(appctx->ncoeff,&appctx->solutioncoefficients));
309   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rand));
310   PetscCall(PetscRandomSetInterval(rand,.9,1.0));
311   for (i=0; i<appctx->ncoeff; i++) {
312     PetscCall(PetscRandomGetValue(rand,&appctx->solutioncoefficients[i]));
313   }
314   PetscCall(PetscRandomDestroy(&rand));
315   PetscFunctionReturn(0);
316 }
317 
318 /* --------------------------------------------------------------------- */
319 /*
320    InitialConditions - Computes the (random) initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve()
321 
322    Input Parameter:
323    u - uninitialized solution vector (global)
324    appctx - user-defined application context
325 
326    Output Parameter:
327    u - vector with solution at initial time (global)
328 */
329 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
330 {
331   PetscScalar       *s;
332   const PetscScalar *xg;
333   PetscInt          i,j,lenglob;
334   PetscReal         sum,val;
335   PetscRandom       rand;
336 
337   PetscFunctionBegin;
338   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rand));
339   PetscCall(PetscRandomSetInterval(rand,.9,1.0));
340   PetscCall(DMDAVecGetArray(appctx->da,u,&s));
341   PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
342   lenglob  = appctx->param.E*(appctx->param.N-1);
343   for (i=0; i<lenglob; i++) {
344     s[i]= 0;
345     for (j=0; j<appctx->ncoeff; j++) {
346       PetscCall(PetscRandomGetValue(rand,&val));
347       s[i] += val*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
348     }
349   }
350   PetscCall(PetscRandomDestroy(&rand));
351   PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
352   PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
353   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
354   PetscCall(VecSum(u,&sum));
355   PetscCall(VecShift(u,-sum/lenglob));
356   PetscFunctionReturn(0);
357 }
358 
359 /*
360    TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function.
361 
362              InitialConditions() computes the initial conditions for the beginning of the Tao iterations
363 
364    Input Parameter:
365    u - uninitialized solution vector (global)
366    appctx - user-defined application context
367 
368    Output Parameter:
369    u - vector with solution at initial time (global)
370 */
371 PetscErrorCode TrueSolution(Vec u,AppCtx *appctx)
372 {
373   PetscScalar       *s;
374   const PetscScalar *xg;
375   PetscInt          i,j,lenglob;
376   PetscReal         sum;
377 
378   PetscFunctionBegin;
379   PetscCall(DMDAVecGetArray(appctx->da,u,&s));
380   PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
381   lenglob  = appctx->param.E*(appctx->param.N-1);
382   for (i=0; i<lenglob; i++) {
383     s[i]= 0;
384     for (j=0; j<appctx->ncoeff; j++) {
385       s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
386     }
387   }
388   PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
389   PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
390   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
391   PetscCall(VecSum(u,&sum));
392   PetscCall(VecShift(u,-sum/lenglob));
393   PetscFunctionReturn(0);
394 }
395 /* --------------------------------------------------------------------- */
396 /*
397    Sets the desired profile for the final end time
398 
399    Input Parameters:
400    t - final time
401    obj - vector storing the desired profile
402    appctx - user-defined application context
403 
404 */
405 PetscErrorCode ComputeReference(TS ts,PetscReal t,Vec obj,AppCtx *appctx)
406 {
407   PetscScalar       *s,tc;
408   const PetscScalar *xg;
409   PetscInt          i, j,lenglob;
410 
411   PetscFunctionBegin;
412   PetscCall(DMDAVecGetArray(appctx->da,obj,&s));
413   PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
414   lenglob  = appctx->param.E*(appctx->param.N-1);
415   for (i=0; i<lenglob; i++) {
416     s[i]= 0;
417     for (j=0; j<appctx->ncoeff; j++) {
418       tc    = -appctx->param.mu*(j+1)*(j+1)*4.0*PETSC_PI*PETSC_PI*t;
419       s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*(xg[i] + appctx->param.a*t))*PetscExpReal(tc);
420     }
421   }
422   PetscCall(DMDAVecRestoreArray(appctx->da,obj,&s));
423   PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
424   PetscFunctionReturn(0);
425 }
426 
427 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
428 {
429   AppCtx          *appctx = (AppCtx*)ctx;
430 
431   PetscFunctionBegin;
432   PetscCall(MatMult(appctx->SEMop.keptstiff,globalin,globalout));
433   PetscFunctionReturn(0);
434 }
435 
436 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
437 {
438   AppCtx         *appctx = (AppCtx*)ctx;
439 
440   PetscFunctionBegin;
441   PetscCall(MatCopy(appctx->SEMop.keptstiff,A,DIFFERENT_NONZERO_PATTERN));
442   PetscFunctionReturn(0);
443 }
444 
445 /* --------------------------------------------------------------------- */
446 
447 /*
448    RHSLaplacian -   matrix for diffusion
449 
450    Input Parameters:
451    ts - the TS context
452    t - current time  (ignored)
453    X - current solution (ignored)
454    dummy - optional user-defined context, as set by TSetRHSJacobian()
455 
456    Output Parameters:
457    AA - Jacobian matrix
458    BB - optionally different matrix from which the preconditioner is built
459    str - flag indicating matrix structure
460 
461    Scales by the inverse of the mass matrix (perhaps that should be pulled out)
462 
463 */
PetscErrorCode RHSLaplacian(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal      **temp;     /* element stiffness matrix (N x N, rows stored contiguously) */
  PetscReal      vv;         /* scaling factor -mu * 2/Le */
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt       i,xs,xn,l,j;
  PetscInt       *rowsDM;    /* local (ghosted) row indices of one element */

  PetscFunctionBegin;
  /*
   Creates the element stiffness matrix for the given gll
   */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));

  /* scale by the size of the element (chain rule for mapping [-1,1] -> element) and by -mu */
  for (i=0; i<appctx->param.N; i++) {
    vv=-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }

  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

  PetscCheck(appctx->param.N-1 >= 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
  /* convert the owned nodal range into an owned element range (N-1 nodes per element) */
  xs   = xs/(appctx->param.N-1);
  xn   = xn/(appctx->param.N-1);

  PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
  /*
   loop over local elements, accumulating the element matrix into the global operator;
   indices are local-ghosted (offset by 1 for the left ghost node), hence MatSetValuesLocal()
   */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  /* left-scale A by the inverse lumped mass vector, then restore the mass vector in place */
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscFunctionReturn(0);
}
511 
512 /*
513     Almost identical to Laplacian
514 
515     Note that the element matrix is NOT scaled by the size of element like the Laplacian term.
516  */
517 PetscErrorCode RHSAdvection(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
518 {
519   PetscReal      **temp;
520   PetscReal      vv;
521   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
522   PetscInt       i,xs,xn,l,j;
523   PetscInt       *rowsDM;
524 
525   PetscFunctionBegin;
526   /*
527    Creates the element stiffness matrix for the given gll
528    */
529   PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
530 
531   /* scale by the size of the element */
532   for (i=0; i<appctx->param.N; i++) {
533     vv = -appctx->param.a;
534     for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
535   }
536 
537   PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
538   PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
539 
540   PetscCheck(appctx->param.N-1 >= 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
541   xs   = xs/(appctx->param.N-1);
542   xn   = xn/(appctx->param.N-1);
543 
544   PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
545   /*
546    loop over local elements
547    */
548   for (j=xs; j<xs+xn; j++) {
549     for (l=0; l<appctx->param.N; l++) {
550       rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
551     }
552     PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
553   }
554   PetscCall(PetscFree(rowsDM));
555   PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
556   PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
557   PetscCall(VecReciprocal(appctx->SEMop.mass));
558   PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
559   PetscCall(VecReciprocal(appctx->SEMop.mass));
560 
561   PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
562   PetscFunctionReturn(0);
563 }
564 
565 /* ------------------------------------------------------------------ */
566 /*
567    FormFunctionGradient - Evaluates the function and corresponding gradient.
568 
569    Input Parameters:
570    tao - the Tao context
571    ic   - the input vector
572    ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()
573 
574    Output Parameters:
575    f   - the newly evaluated function
576    G   - the newly evaluated gradient
577 
578    Notes:
579 
580           The forward equation is
581               M u_t = F(U)
582           which is converted to
583                 u_t = M^{-1} F(u)
584           in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
585                  M^{-1} J
586           where J is the Jacobian of F. Now the adjoint equation is
587                 M v_t = J^T v
588           but TSAdjoint does not solve this since it can only solve the transposed system for the
589           Jacobian the user provided. Hence TSAdjoint solves
590                  w_t = J^T M^{-1} w  (where w = M v)
591           since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
592           must be careful in initializing the "adjoint equation" and using the result. This is
593           why
594               G = -2 M(u(T) - u_d)
595           below (instead of -2(u(T) - u_d)
596 
597 */
PetscErrorCode FormFunctionGradient(Tao tao,Vec ic,PetscReal *f,Vec G,void *ctx)
{
  AppCtx           *appctx = (AppCtx*)ctx;     /* user-defined application context */
  Vec               temp;

  PetscFunctionBegin;
  /* reset the TS so every objective evaluation integrates forward from t=0 with the saved initial step */
  PetscCall(TSSetTime(appctx->ts,0.0));
  PetscCall(TSSetStepNumber(appctx->ts,0));
  PetscCall(TSSetTimeStep(appctx->ts,appctx->initial_dt));
  PetscCall(VecCopy(ic,appctx->dat.curr_sol));

  /* forward solve; the trajectory is saved for the adjoint solve below */
  PetscCall(TSSolve(appctx->ts,appctx->dat.curr_sol));
  PetscCall(VecCopy(appctx->dat.curr_sol,appctx->dat.joe));

  /*     Compute the difference between the current ODE solution and target ODE solution */
  PetscCall(VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.reference));

  /*     Compute the objective/cost function: mass-weighted squared L2 norm of the misfit */
  PetscCall(VecDuplicate(G,&temp));
  PetscCall(VecPointwiseMult(temp,G,G));
  PetscCall(VecDot(temp,appctx->SEMop.mass,f));
  PetscCall(VecDestroy(&temp));

  /*     Compute initial conditions for the adjoint integration. See Notes above:
         TSAdjoint works on w = M v, hence the extra mass-matrix factor in G  */
  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G,G,appctx->SEMop.mass));
  PetscCall(TSSetCostGradients(appctx->ts,1,&G,NULL));

  /* adjoint solve overwrites G in place with the gradient of the objective w.r.t. ic */
  PetscCall(TSAdjointSolve(appctx->ts));
  /* PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass));*/
  PetscFunctionReturn(0);
}
630 
631 PetscErrorCode MonitorError(Tao tao,void *ctx)
632 {
633   AppCtx         *appctx = (AppCtx*)ctx;
634   Vec            temp,grad;
635   PetscReal      nrm;
636   PetscInt       its;
637   PetscReal      fct,gnorm;
638 
639   PetscFunctionBegin;
640   PetscCall(VecDuplicate(appctx->dat.ic,&temp));
641   PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution));
642   PetscCall(VecPointwiseMult(temp,temp,temp));
643   PetscCall(VecDot(temp,appctx->SEMop.mass,&nrm));
644   nrm   = PetscSqrtReal(nrm);
645   PetscCall(TaoGetGradient(tao,&grad,NULL,NULL));
646   PetscCall(VecPointwiseMult(temp,temp,temp));
647   PetscCall(VecDot(temp,appctx->SEMop.mass,&gnorm));
648   gnorm = PetscSqrtReal(gnorm);
649   PetscCall(VecDestroy(&temp));
650   PetscCall(TaoGetIterationNumber(tao,&its));
651   PetscCall(TaoGetSolutionStatus(tao,NULL,&fct,NULL,NULL,NULL,NULL));
652   if (!its) {
653     PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%% Iteration Error Objective Gradient-norm\n"));
654     PetscCall(PetscPrintf(PETSC_COMM_WORLD,"history = [\n"));
655   }
656   PetscCall(PetscPrintf(PETSC_COMM_WORLD,"%3D %g %g %g\n",its,(double)nrm,(double)fct,(double)gnorm));
657   PetscFunctionReturn(0);
658 }
659 
/* MonitorDestroy - monitor-context destroy callback: closes the MATLAB-style
   "history = [" array opened by MonitorError() at iteration 0 */
PetscErrorCode MonitorDestroy(void **ctx)
{
  PetscFunctionBegin;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"];\n"));
  PetscFunctionReturn(0);
}
666 
667 /*TEST
668 
669    build:
670      requires: !complex
671 
672    test:
673      requires: !single
674      args:  -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none
675 
676    test:
677      suffix: cn
678      requires: !single
679      args:  -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none
680 
681    test:
682      suffix: 2
683      requires: !single
684      args:  -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5  -a .1 -tao_bqnls_mat_lmvm_scale_type none
685 
686 TEST*/
687