xref: /petsc/src/tao/unconstrained/tutorials/spectraladjointassimilation.c (revision 4e8208cbcbc709572b8abe32f33c78b69c819375)
1 static char help[] = "Solves a simple data assimilation problem with one dimensional advection diffusion equation using TSAdjoint\n\n";
2 
3 /*
4 
5     Not yet tested in parallel
6 
7 */
8 
9 /* ------------------------------------------------------------------------
10 
11    This program uses the one-dimensional advection-diffusion equation,
12        u_t = mu*u_xx - a u_x,
13    on the domain 0 <= x <= 1, with periodic boundary conditions
14 
15    to demonstrate solving a data assimilation problem of finding the initial conditions
16    to produce a given solution at a fixed time.
17 
18    The operators are discretized with the spectral element method
19 
20   ------------------------------------------------------------------------- */
21 
22 /*
23    Include "petscts.h" so that we can use TS solvers.  Note that this file
24    automatically includes:
25      petscsys.h       - base PETSc routines   petscvec.h  - vectors
26      petscmat.h  - matrices
27      petscis.h     - index sets            petscksp.h  - Krylov subspace methods
28      petscviewer.h - viewers               petscpc.h   - preconditioners
29      petscksp.h   - linear solvers        petscsnes.h - nonlinear solvers
30 */
31 
32 #include <petsctao.h>
33 #include <petscts.h>
34 #include <petscdt.h>
35 #include <petscdraw.h>
36 #include <petscdmda.h>
37 
38 /*
39    User-defined application context - contains data needed by the
40    application-provided call-back routines.
41 */
42 
/* Gauss-Lobatto-Legendre (GLL) quadrature nodes and weights for a single element */
typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;
48 
/* Physical and discretization parameters; several are settable from the options database (see main()) */
typedef struct {
  PetscInt  N;               /* grid points per elements*/
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* endtime */
  PetscReal mu;              /* viscosity */
  PetscReal a;               /* advection speed */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element, L/E (set in main()) */
  PetscReal Tadj;            /* NOTE(review): not referenced in this file; presumably an adjoint time -- confirm it is still needed */
} PetscParam;
61 
/* Problem-data vectors shared by the call-back routines; all are duplicates of the DMDA global vector */
typedef struct {
  Vec reference;     /* desired end state */
  Vec grid;          /* total grid */
  Vec grad;          /* NOTE(review): not referenced in this file -- confirm it is still needed */
  Vec ic;            /* initial conditions; these are the Tao optimization variables */
  Vec curr_sol;      /* current forward solution */
  Vec joe;           /* copy of the forward solution at the final time (written in FormFunctionGradient()) */
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;
71 
/* Spectral element method operators, built once in main() */
typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix (diagonal, stored as a vector) for total integration */
  Mat      stiff;     /* stiffness matrix; after main() runs it holds diffusion minus advection */
  Mat      advec;     /* advection operator */
  Mat      keptstiff; /* copy of the combined operator, used by the alternative RHSFunction()/RHSJacobian() callbacks */
  PetscGLL gll;       /* GLL nodes and weights */
} PetscSEMOperators;
80 
/* Top-level application context handed to every call-back routine */
typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;                /* SEM operators */
  PetscParam        param;                /* physical and discretization parameters */
  PetscData         dat;                  /* problem-data vectors */
  TS                ts;                   /* timestepper used for both forward and adjoint solves */
  PetscReal         initial_dt;           /* initial timestep; reset before each TSSolve() in FormFunctionGradient() */
  PetscReal        *solutioncoefficients; /* random coefficients defining the analytic solution */
  PetscInt          ncoeff;               /* number of solution coefficients */
} AppCtx;
91 
92 /*
93    User-defined routines
94 */
95 extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
96 extern PetscErrorCode RHSLaplacian(TS, PetscReal, Vec, Mat, Mat, void *);
97 extern PetscErrorCode RHSAdvection(TS, PetscReal, Vec, Mat, Mat, void *);
98 extern PetscErrorCode InitialConditions(Vec, AppCtx *);
99 extern PetscErrorCode ComputeReference(TS, PetscReal, Vec, AppCtx *);
100 extern PetscErrorCode MonitorError(Tao, void *);
101 extern PetscErrorCode MonitorDestroy(PetscCtxRt);
102 extern PetscErrorCode ComputeSolutionCoefficients(AppCtx *);
103 extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
104 extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
105 
main(int argc,char ** argv)106 int main(int argc, char **argv)
107 {
108   AppCtx       appctx; /* user-defined application context */
109   Tao          tao;
110   Vec          u; /* approximate solution vector */
111   PetscInt     i, xs, xm, ind, j, lenglob;
112   PetscReal    x, *wrk_ptr1, *wrk_ptr2;
113   MatNullSpace nsp;
114 
115   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
116      Initialize program and set problem parameters
117      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
118   PetscFunctionBeginUser;
119   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
120 
121   /*initialize parameters */
122   appctx.param.N     = 10;      /* order of the spectral element */
123   appctx.param.E     = 8;       /* number of elements */
124   appctx.param.L     = 1.0;     /* length of the domain */
125   appctx.param.mu    = 0.00001; /* diffusion coefficient */
126   appctx.param.a     = 0.0;     /* advection speed */
127   appctx.initial_dt  = 1e-4;
128   appctx.param.steps = PETSC_INT_MAX;
129   appctx.param.Tend  = 0.01;
130   appctx.ncoeff      = 2;
131 
132   PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
133   PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
134   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncoeff", &appctx.ncoeff, NULL));
135   PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
136   PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
137   PetscCall(PetscOptionsGetReal(NULL, NULL, "-a", &appctx.param.a, NULL));
138   appctx.param.Le = appctx.param.L / appctx.param.E;
139 
140   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
141      Create GLL data structures
142      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
143   PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
144   PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
145   appctx.SEMop.gll.n = appctx.param.N;
146   lenglob            = appctx.param.E * (appctx.param.N - 1);
147 
148   /*
149      Create distributed array (DMDA) to manage parallel grid and vectors
150      and to set up the ghost point communication pattern.  There are E*(Nl-1)+1
151      total grid values spread equally among all the processors, except first and last
152   */
153 
154   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
155   PetscCall(DMSetFromOptions(appctx.da));
156   PetscCall(DMSetUp(appctx.da));
157 
158   /*
159      Extract global and local vectors from DMDA; we use these to store the
160      approximate solution.  Then duplicate these for remaining vectors that
161      have the same types.
162   */
163 
164   PetscCall(DMCreateGlobalVector(appctx.da, &u));
165   PetscCall(VecDuplicate(u, &appctx.dat.ic));
166   PetscCall(VecDuplicate(u, &appctx.dat.true_solution));
167   PetscCall(VecDuplicate(u, &appctx.dat.reference));
168   PetscCall(VecDuplicate(u, &appctx.SEMop.grid));
169   PetscCall(VecDuplicate(u, &appctx.SEMop.mass));
170   PetscCall(VecDuplicate(u, &appctx.dat.curr_sol));
171   PetscCall(VecDuplicate(u, &appctx.dat.joe));
172 
173   PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
174   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
175   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
176 
177   /* Compute function over the locally owned part of the grid */
178 
179   xs = xs / (appctx.param.N - 1);
180   xm = xm / (appctx.param.N - 1);
181 
182   /*
183      Build total grid and mass over entire mesh (multi-elemental)
184   */
185 
186   for (i = xs; i < xs + xm; i++) {
187     for (j = 0; j < appctx.param.N - 1; j++) {
188       x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
189       ind           = i * (appctx.param.N - 1) + j;
190       wrk_ptr1[ind] = x;
191       wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
192       if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
193     }
194   }
195   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
196   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
197 
198   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
199    Create matrix data structure; set matrix evaluation routine.
200    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
201   PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
202   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
203   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.advec));
204 
205   /*
206    For linear problems with a time-dependent f(u,t) in the equation
207    u_t = f(u,t), the user provides the discretized right-hand side
208    as a time-dependent matrix.
209    */
210   PetscCall(RHSLaplacian(appctx.ts, 0.0, u, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
211   PetscCall(RHSAdvection(appctx.ts, 0.0, u, appctx.SEMop.advec, appctx.SEMop.advec, &appctx));
212   PetscCall(MatAXPY(appctx.SEMop.stiff, -1.0, appctx.SEMop.advec, DIFFERENT_NONZERO_PATTERN));
213   PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
214 
215   /* attach the null space to the matrix, this probably is not needed but does no harm */
216   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
217   PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
218   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
219   PetscCall(MatNullSpaceDestroy(&nsp));
220 
221   /* Create the TS solver that solves the ODE and its adjoint; set its options */
222   PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
223   PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode (*)(TS, PetscReal, Vec, void *))ComputeReference, &appctx));
224   PetscCall(TSSetProblemType(appctx.ts, TS_LINEAR));
225   PetscCall(TSSetType(appctx.ts, TSRK));
226   PetscCall(TSSetDM(appctx.ts, appctx.da));
227   PetscCall(TSSetTime(appctx.ts, 0.0));
228   PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
229   PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
230   PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
231   PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
232   PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
233   PetscCall(TSSetFromOptions(appctx.ts));
234   /* Need to save initial timestep user may have set with -ts_time_step so it can be reset for each new TSSolve() */
235   PetscCall(TSGetTimeStep(appctx.ts, &appctx.initial_dt));
236   PetscCall(TSSetRHSFunction(appctx.ts, NULL, TSComputeRHSFunctionLinear, &appctx));
237   PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, TSComputeRHSJacobianConstant, &appctx));
238   /*  PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
239       PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); */
240 
241   /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
242   PetscCall(ComputeSolutionCoefficients(&appctx));
243   PetscCall(InitialConditions(appctx.dat.ic, &appctx));
244   PetscCall(ComputeReference(appctx.ts, appctx.param.Tend, appctx.dat.reference, &appctx));
245   PetscCall(ComputeReference(appctx.ts, 0.0, appctx.dat.true_solution, &appctx));
246 
247   /* Set up to save trajectory before TSSetFromOptions() so that TSTrajectory options can be captured */
248   PetscCall(TSSetSaveTrajectory(appctx.ts));
249   PetscCall(TSSetFromOptions(appctx.ts));
250 
251   /* Create TAO solver and set desired solution method  */
252   PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
253   PetscCall(TaoMonitorSet(tao, MonitorError, &appctx, MonitorDestroy));
254   PetscCall(TaoSetType(tao, TAOBQNLS));
255   PetscCall(TaoSetSolution(tao, appctx.dat.ic));
256   /* Set routine for function and gradient evaluation  */
257   PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, (void *)&appctx));
258   /* Check for any TAO command line options  */
259   PetscCall(TaoSetTolerances(tao, 1e-8, PETSC_CURRENT, PETSC_CURRENT));
260   PetscCall(TaoSetFromOptions(tao));
261   PetscCall(TaoSolve(tao));
262 
263   PetscCall(TaoDestroy(&tao));
264   PetscCall(PetscFree(appctx.solutioncoefficients));
265   PetscCall(MatDestroy(&appctx.SEMop.advec));
266   PetscCall(MatDestroy(&appctx.SEMop.stiff));
267   PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
268   PetscCall(VecDestroy(&u));
269   PetscCall(VecDestroy(&appctx.dat.ic));
270   PetscCall(VecDestroy(&appctx.dat.joe));
271   PetscCall(VecDestroy(&appctx.dat.true_solution));
272   PetscCall(VecDestroy(&appctx.dat.reference));
273   PetscCall(VecDestroy(&appctx.SEMop.grid));
274   PetscCall(VecDestroy(&appctx.SEMop.mass));
275   PetscCall(VecDestroy(&appctx.dat.curr_sol));
276   PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
277   PetscCall(DMDestroy(&appctx.da));
278   PetscCall(TSDestroy(&appctx.ts));
279 
280   /*
281      Always call PetscFinalize() before exiting a program.  This routine
282        - finalizes the PETSc libraries as well as MPI
283        - provides summary and diagnostic information if certain runtime
284          options are chosen (e.g., -log_view).
285   */
286   PetscCall(PetscFinalize());
287   return 0;
288 }
289 
290 /*
291     Computes the coefficients for the analytic solution to the PDE
292 */
ComputeSolutionCoefficients(AppCtx * appctx)293 PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
294 {
295   PetscRandom rand;
296   PetscInt    i;
297 
298   PetscFunctionBegin;
299   PetscCall(PetscMalloc1(appctx->ncoeff, &appctx->solutioncoefficients));
300   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
301   PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
302   for (i = 0; i < appctx->ncoeff; i++) PetscCall(PetscRandomGetValue(rand, &appctx->solutioncoefficients[i]));
303   PetscCall(PetscRandomDestroy(&rand));
304   PetscFunctionReturn(PETSC_SUCCESS);
305 }
306 
307 /* --------------------------------------------------------------------- */
308 /*
309    InitialConditions - Computes the (random) initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve()
310 
311    Input Parameter:
312    u - uninitialized solution vector (global)
313    appctx - user-defined application context
314 
315    Output Parameter:
316    u - vector with solution at initial time (global)
317 */
InitialConditions(Vec u,AppCtx * appctx)318 PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
319 {
320   PetscScalar       *s;
321   const PetscScalar *xg;
322   PetscInt           i, j, lenglob;
323   PetscReal          sum, val;
324   PetscRandom        rand;
325 
326   PetscFunctionBegin;
327   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
328   PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
329   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
330   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
331   lenglob = appctx->param.E * (appctx->param.N - 1);
332   for (i = 0; i < lenglob; i++) {
333     s[i] = 0;
334     for (j = 0; j < appctx->ncoeff; j++) {
335       PetscCall(PetscRandomGetValue(rand, &val));
336       s[i] += val * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
337     }
338   }
339   PetscCall(PetscRandomDestroy(&rand));
340   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
341   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
342   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
343   PetscCall(VecSum(u, &sum));
344   PetscCall(VecShift(u, -sum / lenglob));
345   PetscFunctionReturn(PETSC_SUCCESS);
346 }
347 
348 /*
349    TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function.
350 
351              InitialConditions() computes the initial conditions for the beginning of the Tao iterations
352 
353    Input Parameter:
354    u - uninitialized solution vector (global)
355    appctx - user-defined application context
356 
357    Output Parameter:
358    u - vector with solution at initial time (global)
359 */
TrueSolution(Vec u,AppCtx * appctx)360 PetscErrorCode TrueSolution(Vec u, AppCtx *appctx)
361 {
362   PetscScalar       *s;
363   const PetscScalar *xg;
364   PetscInt           i, j, lenglob;
365   PetscReal          sum;
366 
367   PetscFunctionBegin;
368   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
369   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
370   lenglob = appctx->param.E * (appctx->param.N - 1);
371   for (i = 0; i < lenglob; i++) {
372     s[i] = 0;
373     for (j = 0; j < appctx->ncoeff; j++) s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
374   }
375   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
376   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
377   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
378   PetscCall(VecSum(u, &sum));
379   PetscCall(VecShift(u, -sum / lenglob));
380   PetscFunctionReturn(PETSC_SUCCESS);
381 }
382 /* --------------------------------------------------------------------- */
383 /*
384    Sets the desired profile for the final end time
385 
386    Input Parameters:
387    t - final time
388    obj - vector storing the desired profile
389    appctx - user-defined application context
390 
391 */
ComputeReference(TS ts,PetscReal t,Vec obj,AppCtx * appctx)392 PetscErrorCode ComputeReference(TS ts, PetscReal t, Vec obj, AppCtx *appctx)
393 {
394   PetscScalar       *s, tc;
395   const PetscScalar *xg;
396   PetscInt           i, j, lenglob;
397 
398   PetscFunctionBegin;
399   PetscCall(DMDAVecGetArray(appctx->da, obj, &s));
400   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
401   lenglob = appctx->param.E * (appctx->param.N - 1);
402   for (i = 0; i < lenglob; i++) {
403     s[i] = 0;
404     for (j = 0; j < appctx->ncoeff; j++) {
405       tc = -appctx->param.mu * (j + 1) * (j + 1) * 4.0 * PETSC_PI * PETSC_PI * t;
406       s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * (xg[i] + appctx->param.a * t)) * PetscExpReal(tc);
407     }
408   }
409   PetscCall(DMDAVecRestoreArray(appctx->da, obj, &s));
410   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
411   PetscFunctionReturn(PETSC_SUCCESS);
412 }
413 
RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,PetscCtx ctx)414 PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, PetscCtx ctx)
415 {
416   AppCtx *appctx = (AppCtx *)ctx;
417 
418   PetscFunctionBegin;
419   PetscCall(MatMult(appctx->SEMop.keptstiff, globalin, globalout));
420   PetscFunctionReturn(PETSC_SUCCESS);
421 }
422 
RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A,Mat B,PetscCtx ctx)423 PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, PetscCtx ctx)
424 {
425   AppCtx *appctx = (AppCtx *)ctx;
426 
427   PetscFunctionBegin;
428   PetscCall(MatCopy(appctx->SEMop.keptstiff, A, DIFFERENT_NONZERO_PATTERN));
429   PetscFunctionReturn(PETSC_SUCCESS);
430 }
431 
432 /* --------------------------------------------------------------------- */
433 
434 /*
435    RHSLaplacian -   matrix for diffusion
436 
437    Input Parameters:
438    ts - the TS context
439    t - current time  (ignored)
440    X - current solution (ignored)
441    dummy - optional user-defined context, as set by TSetRHSJacobian()
442 
443    Output Parameters:
444    AA - Jacobian matrix
445    BB - optionally different matrix from which the preconditioner is built
446 
447    Scales by the inverse of the mass matrix (perhaps that should be pulled out)
448 
449 */
RHSLaplacian(TS ts,PetscReal t,Vec X,Mat A,Mat BB,PetscCtx ctx)450 PetscErrorCode RHSLaplacian(TS ts, PetscReal t, Vec X, Mat A, Mat BB, PetscCtx ctx)
451 {
452   PetscReal **temp;
453   PetscReal   vv;
454   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
455   PetscInt    i, xs, xn, l, j;
456   PetscInt   *rowsDM;
457 
458   PetscFunctionBegin;
459   /*
460    Creates the element stiffness matrix for the given gll
461    */
462   PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
463 
464   /* scale by the size of the element */
465   for (i = 0; i < appctx->param.N; i++) {
466     vv = -appctx->param.mu * 2.0 / appctx->param.Le;
467     for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
468   }
469 
470   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
471   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
472 
473   PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
474   xs = xs / (appctx->param.N - 1);
475   xn = xn / (appctx->param.N - 1);
476 
477   PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
478   /*
479    loop over local elements
480    */
481   for (j = xs; j < xs + xn; j++) {
482     for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
483     PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
484   }
485   PetscCall(PetscFree(rowsDM));
486   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
487   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
488   PetscCall(VecReciprocal(appctx->SEMop.mass));
489   PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
490   PetscCall(VecReciprocal(appctx->SEMop.mass));
491 
492   PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
493   PetscFunctionReturn(PETSC_SUCCESS);
494 }
495 
496 /*
497     Almost identical to Laplacian
498 
499     Note that the element matrix is NOT scaled by the size of element like the Laplacian term.
500  */
RHSAdvection(TS ts,PetscReal t,Vec X,Mat A,Mat BB,PetscCtx ctx)501 PetscErrorCode RHSAdvection(TS ts, PetscReal t, Vec X, Mat A, Mat BB, PetscCtx ctx)
502 {
503   PetscReal **temp;
504   PetscReal   vv;
505   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
506   PetscInt    i, xs, xn, l, j;
507   PetscInt   *rowsDM;
508 
509   PetscFunctionBegin;
510   /*
511    Creates the element stiffness matrix for the given gll
512    */
513   PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
514 
515   /* scale by the size of the element */
516   for (i = 0; i < appctx->param.N; i++) {
517     vv = -appctx->param.a;
518     for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
519   }
520 
521   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
522   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
523 
524   PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
525   xs = xs / (appctx->param.N - 1);
526   xn = xn / (appctx->param.N - 1);
527 
528   PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
529   /*
530    loop over local elements
531    */
532   for (j = xs; j < xs + xn; j++) {
533     for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
534     PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
535   }
536   PetscCall(PetscFree(rowsDM));
537   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
538   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
539   PetscCall(VecReciprocal(appctx->SEMop.mass));
540   PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, 0));
541   PetscCall(VecReciprocal(appctx->SEMop.mass));
542 
543   PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
544   PetscFunctionReturn(PETSC_SUCCESS);
545 }
546 
547 /* ------------------------------------------------------------------ */
548 /*
549    FormFunctionGradient - Evaluates the function and corresponding gradient.
550 
551    Input Parameters:
552    tao - the Tao context
553    ic   - the input vector
554    ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()
555 
556    Output Parameters:
557    f   - the newly evaluated function
558    G   - the newly evaluated gradient
559 
560    Notes:
561 
562           The forward equation is
563               M u_t = F(U)
564           which is converted to
565                 u_t = M^{-1} F(u)
566           in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
567                  M^{-1} J
568           where J is the Jacobian of F. Now the adjoint equation is
569                 M v_t = J^T v
570           but TSAdjoint does not solve this since it can only solve the transposed system for the
571           Jacobian the user provided. Hence TSAdjoint solves
572                  w_t = J^T M^{-1} w  (where w = M v)
573           since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
574           must be careful in initializing the "adjoint equation" and using the result. This is
575           why
576               G = -2 M(u(T) - u_d)
577           below (instead of -2(u(T) - u_d)
578 
579 */
FormFunctionGradient(Tao tao,Vec ic,PetscReal * f,Vec G,PetscCtx ctx)580 PetscErrorCode FormFunctionGradient(Tao tao, Vec ic, PetscReal *f, Vec G, PetscCtx ctx)
581 {
582   AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
583   Vec     temp;
584 
585   PetscFunctionBegin;
586   PetscCall(TSSetTime(appctx->ts, 0.0));
587   PetscCall(TSSetStepNumber(appctx->ts, 0));
588   PetscCall(TSSetTimeStep(appctx->ts, appctx->initial_dt));
589   PetscCall(VecCopy(ic, appctx->dat.curr_sol));
590 
591   PetscCall(TSSolve(appctx->ts, appctx->dat.curr_sol));
592   PetscCall(VecCopy(appctx->dat.curr_sol, appctx->dat.joe));
593 
594   /*     Compute the difference between the current ODE solution and target ODE solution */
595   PetscCall(VecWAXPY(G, -1.0, appctx->dat.curr_sol, appctx->dat.reference));
596 
597   /*     Compute the objective/cost function   */
598   PetscCall(VecDuplicate(G, &temp));
599   PetscCall(VecPointwiseMult(temp, G, G));
600   PetscCall(VecDot(temp, appctx->SEMop.mass, f));
601   PetscCall(VecDestroy(&temp));
602 
603   /*     Compute initial conditions for the adjoint integration. See Notes above  */
604   PetscCall(VecScale(G, -2.0));
605   PetscCall(VecPointwiseMult(G, G, appctx->SEMop.mass));
606   PetscCall(TSSetCostGradients(appctx->ts, 1, &G, NULL));
607 
608   PetscCall(TSAdjointSolve(appctx->ts));
609   /* PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass));*/
610   PetscFunctionReturn(PETSC_SUCCESS);
611 }
612 
MonitorError(Tao tao,PetscCtx ctx)613 PetscErrorCode MonitorError(Tao tao, PetscCtx ctx)
614 {
615   AppCtx   *appctx = (AppCtx *)ctx;
616   Vec       temp, grad;
617   PetscReal nrm;
618   PetscInt  its;
619   PetscReal fct, gnorm;
620 
621   PetscFunctionBegin;
622   PetscCall(VecDuplicate(appctx->dat.ic, &temp));
623   PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
624   PetscCall(VecPointwiseMult(temp, temp, temp));
625   PetscCall(VecDot(temp, appctx->SEMop.mass, &nrm));
626   nrm = PetscSqrtReal(nrm);
627   PetscCall(TaoGetGradient(tao, &grad, NULL, NULL));
628   PetscCall(VecPointwiseMult(temp, temp, temp));
629   PetscCall(VecDot(temp, appctx->SEMop.mass, &gnorm));
630   gnorm = PetscSqrtReal(gnorm);
631   PetscCall(VecDestroy(&temp));
632   PetscCall(TaoGetIterationNumber(tao, &its));
633   PetscCall(TaoGetSolutionStatus(tao, NULL, &fct, NULL, NULL, NULL, NULL));
634   if (!its) {
635     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%% Iteration Error Objective Gradient-norm\n"));
636     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "history = [\n"));
637   }
638   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%3" PetscInt_FMT " %g %g %g\n", its, (double)nrm, (double)fct, (double)gnorm));
639   PetscFunctionReturn(PETSC_SUCCESS);
640 }
641 
MonitorDestroy(PetscCtxRt ctx)642 PetscErrorCode MonitorDestroy(PetscCtxRt ctx)
643 {
644   PetscFunctionBegin;
645   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "];\n"));
646   PetscFunctionReturn(PETSC_SUCCESS);
647 }
648 
649 /*TEST
650 
651    build:
652      requires: !complex
653 
654    test:
655      requires: !single
656      args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none
657 
658    test:
659      suffix: cn
660      requires: !single
661      args: -ts_type cn -ts_time_step .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none
662 
663    test:
664      suffix: 2
665      requires: !single
666      args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -a .1 -tao_bqnls_mat_lmvm_scale_type none
667 
668 TEST*/
669