xref: /petsc/src/ts/interface/sensitivity/tssen.c (revision df4cd43f92eaa320656440c40edb1046daee8f75)
1 #include <petsc/private/tsimpl.h> /*I "petscts.h"  I*/
2 #include <petscdraw.h>
3 
4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;
5 
6 /* #define TSADJOINT_STAGE */
7 
8 /* ------------------------ Sensitivity Context ---------------------------*/
9 
10 /*@C
11   TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.
12 
13   Logically Collective
14 
15   Input Parameters:
16 + ts - `TS` context obtained from `TSCreate()`
17 . Amat - JacobianP matrix
18 . func - function
19 - ctx - [optional] user-defined function context
20 
21   Calling sequence of func:
$ func (TS ts,PetscReal t,Vec U,Mat A,void *ctx);
23 +   t - current timestep
24 .   U - input vector (current ODE solution)
25 .   A - output matrix
26 -   ctx - [optional] user-defined function context
27 
28   Level: intermediate
29 
30   Note:
31     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
32 
33 .seealso: [](chapter_ts), `TS`, `TSGetRHSJacobianP()`
34 @*/
35 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
36 {
37   PetscFunctionBegin;
38   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
39   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
40 
41   ts->rhsjacobianp    = func;
42   ts->rhsjacobianpctx = ctx;
43   if (Amat) {
44     PetscCall(PetscObjectReference((PetscObject)Amat));
45     PetscCall(MatDestroy(&ts->Jacprhs));
46     ts->Jacprhs = Amat;
47   }
48   PetscFunctionReturn(PETSC_SUCCESS);
49 }
50 
51 /*@C
52   TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.
53 
54   Logically Collective
55 
56   Input Parameter:
57 . ts - `TS` context obtained from `TSCreate()`
58 
59   Output Parameters:
60 + Amat - JacobianP matrix
61 . func - function
62 - ctx - [optional] user-defined function context
63 
64   Calling sequence of func:
$ func (TS ts,PetscReal t,Vec U,Mat A,void *ctx);
66 +   t - current timestep
67 .   U - input vector (current ODE solution)
68 .   A - output matrix
69 -   ctx - [optional] user-defined function context
70 
71   Level: intermediate
72 
73   Note:
74     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
75 
76 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`, `TSGetRHSJacobianP()`
77 @*/
78 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS, PetscReal, Vec, Mat, void *), void **ctx)
79 {
80   PetscFunctionBegin;
81   if (func) *func = ts->rhsjacobianp;
82   if (ctx) *ctx = ts->rhsjacobianpctx;
83   if (Amat) *Amat = ts->Jacprhs;
84   PetscFunctionReturn(PETSC_SUCCESS);
85 }
86 
87 /*@C
88   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.
89 
90   Collective
91 
  Input Parameters:
+ ts   - The `TS` context obtained from `TSCreate()`
. t    - the time at which the JacobianP is evaluated
. U    - the solution at which the JacobianP is evaluated
- Amat - the matrix into which the JacobianP is stored
94 
95   Level: developer
96 
97 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`
98 @*/
PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;
  /* No output matrix: nothing to compute */
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Invoke the user callback set with TSSetRHSJacobianP(); otherwise the
     JacobianP defaults to a zero matrix, which must still end up assembled */
  if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  else {
    PetscBool assembled;
    PetscCall(MatZeroEntries(Amat));
    PetscCall(MatAssembled(Amat, &assembled));
    if (!assembled) {
      PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
118 
119 /*@C
120   TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.
121 
122   Logically Collective
123 
124   Input Parameters:
125 + ts - `TS` context obtained from `TSCreate()`
126 . Amat - JacobianP matrix
127 . func - function
128 - ctx - [optional] user-defined function context
129 
130   Calling sequence of func:
$ func (TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat A,void *ctx);
132 +   t - current timestep
133 .   U - input vector (current ODE solution)
134 .   Udot - time derivative of state vector
135 .   shift - shift to apply, see note below
136 .   A - output matrix
137 -   ctx - [optional] user-defined function context
138 
139   Level: intermediate
140 
141   Note:
142     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
143 
144 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`
145 @*/
146 PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Vec, PetscReal, Mat, void *), void *ctx)
147 {
148   PetscFunctionBegin;
149   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
150   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
151 
152   ts->ijacobianp    = func;
153   ts->ijacobianpctx = ctx;
154   if (Amat) {
155     PetscCall(PetscObjectReference((PetscObject)Amat));
156     PetscCall(MatDestroy(&ts->Jacp));
157     ts->Jacp = Amat;
158   }
159   PetscFunctionReturn(PETSC_SUCCESS);
160 }
161 
162 /*@C
163   TSComputeIJacobianP - Runs the user-defined IJacobianP function.
164 
165   Collective
166 
167   Input Parameters:
168 + ts - the `TS` context
169 . t - current timestep
170 . U - state vector
171 . Udot - time derivative of state vector
172 . shift - shift to apply, see note below
173 - imex - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate
174 
  Output Parameter:
. Amat - Jacobian matrix
177 
178   Level: developer
179 
180 .seealso: [](chapter_ts), `TS`, `TSSetIJacobianP()`
181 @*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  /* No output matrix: nothing to compute */
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  /* Evaluate the user IJacobianP callback (dF/dP) if one was set with TSSetIJacobianP() */
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      /* IMEX with no IJacobianP: the implicit part contributes nothing, so
         produce a zero matrix and make sure it is assembled */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* Non-IMEX: fold the RHS JacobianP (dG/dP) into Amat with a minus sign,
       since the combined residual is F(Udot,U,t) - G(U,P,t) */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
217 
218 /*@C
219     TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions
220 
221     Logically Collective
222 
223     Input Parameters:
224 +   ts - the `TS` context obtained from `TSCreate()`
225 .   numcost - number of gradients to be computed, this is the number of cost functions
226 .   costintegral - vector that stores the integral values
227 .   rf - routine for evaluating the integrand function
228 .   drduf - function that computes the gradients of the r's with respect to u
229 .   drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL)
230 .   fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
231 -   ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL)
232 
233     Calling sequence of rf:
234 $   PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx);
235 
236     Calling sequence of drduf:
$   PetscErrorCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx);
238 
239     Calling sequence of drdpf:
$   PetscErrorCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx);
241 
242     Level: deprecated
243 
244     Note:
245     For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions
246 
247 .seealso: [](chapter_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`
248 @*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  /* numcost must agree with any value already fixed by TSSetCostGradients() or
     TSForwardSetIntegralGradients() */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* Reference before destroying so passing the currently-held vector is safe */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  /* Work vector for integrand evaluations; created lazily, zeroed on reuse */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}
280 
281 /*@C
282    TSGetCostIntegral - Returns the values of the integral term in the cost functions.
283    It is valid to call the routine after a backward run.
284 
285    Not Collective
286 
287    Input Parameter:
288 .  ts - the `TS` context obtained from `TSCreate()`
289 
290    Output Parameter:
291 .  v - the vector containing the integrals for each cost function
292 
293    Level: intermediate
294 
.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
296 @*/
PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
{
  TS quadts;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(v, 2);
  /* The integral terms of the cost functions are carried as the solution
     vector of the quadrature sub-TS */
  PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
  /* NOTE(review): assumes the quadrature TS exists (quadts != NULL) — verify
     callers have set up cost integration before calling this routine */
  *v = quadts->vec_sol;
  PetscFunctionReturn(PETSC_SUCCESS);
}
308 
309 /*@C
310    TSComputeCostIntegrand - Evaluates the integral function in the cost functions.
311 
312    Input Parameters:
313 +  ts - the `TS` context
314 .  t - current time
315 -  U - state vector, i.e. current solution
316 
317    Output Parameter:
318 .  Q - vector of size numcost to hold the outputs
319 
320    Level: deprecated
321 
322    Note:
323    Most users should not need to explicitly call this routine, as it
324    is used internally within the sensitivity analysis context.
325 
326 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
327 @*/
328 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
329 {
330   PetscFunctionBegin;
331   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
332   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
333   PetscValidHeaderSpecific(Q, VEC_CLASSID, 4);
334 
335   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
336   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
337   else PetscCall(VecZeroEntries(Q));
338   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
339   PetscFunctionReturn(PETSC_SUCCESS);
340 }
341 
342 /*@C
343   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
344 
345   Level: deprecated
346 
347 @*/
348 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
349 {
350   PetscFunctionBegin;
351   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);
352   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
353   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
354 
355   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
356   PetscFunctionReturn(PETSC_SUCCESS);
357 }
358 
359 /*@C
360   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
361 
362   Level: deprecated
363 
364 @*/
365 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
366 {
367   PetscFunctionBegin;
368   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);
369   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
370   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
371 
372   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
373   PetscFunctionReturn(PETSC_SUCCESS);
374 }
375 
376 /*@C
377   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.
378 
379   Logically Collective
380 
381   Input Parameters:
382 + ts - `TS` context obtained from `TSCreate()`
383 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
384 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
385 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
386 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
387 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
388 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
389 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
390 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP
391 
392   Calling sequence of ihessianproductfunc:
393 $ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
394 +   t - current timestep
395 .   U - input vector (current ODE solution)
396 .   Vl - an array of input vectors to be left-multiplied with the Hessian
397 .   Vr - input vector to be right-multiplied with the Hessian
398 .   VHV - an array of output vectors for vector-Hessian-vector product
399 -   ctx - [optional] user-defined function context
400 
401   Level: intermediate
402 
403   Notes:
404   The first Hessian function and the working array are required.
405   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
406   $ Vl_n^T*F_UP*Vr
407   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
408   Each entry of F_UP corresponds to the derivative
409   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
410   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
411   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
412   If the cost function is a scalar, there will be only one vector in Vl and VHV.
413 
414 .seealso: [](chapter_ts), `TS`
415 @*/
416 PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
417 {
418   PetscFunctionBegin;
419   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
420   PetscValidPointer(ihp1, 2);
421 
422   ts->ihessianproductctx = ctx;
423   if (ihp1) ts->vecs_fuu = ihp1;
424   if (ihp2) ts->vecs_fup = ihp2;
425   if (ihp3) ts->vecs_fpu = ihp3;
426   if (ihp4) ts->vecs_fpp = ihp4;
427   ts->ihessianproduct_fuu = ihessianproductfunc1;
428   ts->ihessianproduct_fup = ihessianproductfunc2;
429   ts->ihessianproduct_fpu = ihessianproductfunc3;
430   ts->ihessianproduct_fpp = ihessianproductfunc4;
431   PetscFunctionReturn(PETSC_SUCCESS);
432 }
433 
434 /*@C
435   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.
436 
437   Collective
438 
439   Input Parameters:
440 . ts   - The `TS` context obtained from `TSCreate()`
441 
442   Level: developer
443 
444   Note:
445   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
446   so most users would not generally call this routine themselves.
447 
448 .seealso: [](chapter_ts), `TSSetIHessianProduct()`
449 @*/
450 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
451 {
452   PetscFunctionBegin;
453   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
454   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
455   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
456 
457   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
458 
459   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
460   if (ts->rhshessianproduct_guu) {
461     PetscInt nadj;
462     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
463     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
464   }
465   PetscFunctionReturn(PETSC_SUCCESS);
466 }
467 
468 /*@C
469   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.
470 
471   Collective
472 
473   Input Parameters:
474 . ts   - The `TS` context obtained from `TSCreate()`
475 
476   Level: developer
477 
478   Note:
479   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
480   so most users would not generally call this routine themselves.
481 
482 .seealso: [](chapter_ts), `TSSetIHessianProduct()`
483 @*/
484 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
485 {
486   PetscFunctionBegin;
487   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
488   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
489   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
490 
491   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
492 
493   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
494   if (ts->rhshessianproduct_gup) {
495     PetscInt nadj;
496     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
497     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
498   }
499   PetscFunctionReturn(PETSC_SUCCESS);
500 }
501 
502 /*@C
503   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.
504 
505   Collective
506 
507   Input Parameters:
508 . ts   - The `TS` context obtained from `TSCreate()`
509 
510   Level: developer
511 
512   Note:
513   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
514   so most users would not generally call this routine themselves.
515 
516 .seealso: [](chapter_ts), `TSSetIHessianProduct()`
517 @*/
518 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
519 {
520   PetscFunctionBegin;
521   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
522   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
523   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
524 
525   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
526 
527   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
528   if (ts->rhshessianproduct_gpu) {
529     PetscInt nadj;
530     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
531     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
532   }
533   PetscFunctionReturn(PETSC_SUCCESS);
534 }
535 
536 /*@C
537   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.
538 
539   Collective
540 
541   Input Parameters:
542 . ts   - The `TS` context obtained from `TSCreate()`
543 
544   Level: developer
545 
546   Note:
547   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
548   so most users would not generally call this routine themselves.
549 
550 .seealso: [](chapter_ts), `TSSetIHessianProduct()`
551 @*/
552 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
553 {
554   PetscFunctionBegin;
555   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
556   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
557   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
558 
559   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
560 
561   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
562   if (ts->rhshessianproduct_gpp) {
563     PetscInt nadj;
564     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
565     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
566   }
567   PetscFunctionReturn(PETSC_SUCCESS);
568 }
569 
570 /*@C
571   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable.
572 
573   Logically Collective
574 
575   Input Parameters:
576 + ts - `TS` context obtained from `TSCreate()`
577 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
578 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
579 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
580 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
581 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
582 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
583 . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
584 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP
585 
586   Calling sequence of ihessianproductfunc:
587 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
588 +   t - current timestep
589 .   U - input vector (current ODE solution)
590 .   Vl - an array of input vectors to be left-multiplied with the Hessian
591 .   Vr - input vector to be right-multiplied with the Hessian
592 .   VHV - an array of output vectors for vector-Hessian-vector product
593 -   ctx - [optional] user-defined function context
594 
595   Level: intermediate
596 
597   Notes:
598   The first Hessian function and the working array are required.
599   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
600   $ Vl_n^T*G_UP*Vr
601   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
602   Each entry of G_UP corresponds to the derivative
603   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
604   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
605   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
606   If the cost function is a scalar, there will be only one vector in Vl and VHV.
607 
608 .seealso: `TS`
609 @*/
610 PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
611 {
612   PetscFunctionBegin;
613   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
614   PetscValidPointer(rhshp1, 2);
615 
616   ts->rhshessianproductctx = ctx;
617   if (rhshp1) ts->vecs_guu = rhshp1;
618   if (rhshp2) ts->vecs_gup = rhshp2;
619   if (rhshp3) ts->vecs_gpu = rhshp3;
620   if (rhshp4) ts->vecs_gpp = rhshp4;
621   ts->rhshessianproduct_guu = rhshessianproductfunc1;
622   ts->rhshessianproduct_gup = rhshessianproductfunc2;
623   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
624   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
625   PetscFunctionReturn(PETSC_SUCCESS);
626 }
627 
628 /*@C
629   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.
630 
631   Collective
632 
633   Input Parameters:
634 . ts   - The `TS` context obtained from `TSCreate()`
635 
636   Level: developer
637 
638   Note:
639   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
640   so most users would not generally call this routine themselves.
641 
642 .seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
643 @*/
PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  /* No output array: nothing to compute */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Invoke the user G_UU product; assumes TSSetRHSHessianProduct() registered
     a non-NULL first callback — TODO confirm callers guarantee this */
  PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
654 
655 /*@C
656   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.
657 
658   Collective
659 
660   Input Parameters:
661 . ts   - The `TS` context obtained from `TSCreate()`
662 
663   Level: developer
664 
665   Note:
666   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
667   so most users would not generally call this routine themselves.
668 
669 .seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
670 @*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  /* No output array: nothing to compute */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Invoke the user G_UP product; assumes the callback was registered via
     TSSetRHSHessianProduct() — TODO confirm callers guarantee this */
  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
681 
682 /*@C
683   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.
684 
685   Collective
686 
687   Input Parameters:
688 . ts   - The `TS` context obtained from `TSCreate()`
689 
690   Level: developer
691 
692   Note:
693   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
694   so most users would not generally call this routine themselves.
695 
696 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()`
697 @*/
PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  /* No output array: nothing to compute */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Invoke the user G_PU product; assumes the callback was registered via
     TSSetRHSHessianProduct() — TODO confirm callers guarantee this */
  PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
708 
709 /*@C
710   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.
711 
712   Collective
713 
714   Input Parameters:
715 . ts   - The `TS` context obtained from `TSCreate()`
716 
717   Level: developer
718 
719   Note:
720   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
721   so most users would not generally call this routine themselves.
722 
723 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()`
724 @*/
725 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
726 {
727   PetscFunctionBegin;
728   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
729   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
730   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
731 
732   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
733   PetscFunctionReturn(PETSC_SUCCESS);
734 }
735 
736 /* --------------------------- Adjoint sensitivity ---------------------------*/
737 
738 /*@
739    TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
740       for use by the `TS` adjoint routines.
741 
742    Logically Collective
743 
744    Input Parameters:
745 +  ts - the `TS` context obtained from `TSCreate()`
746 .  numcost - number of gradients to be computed, this is the number of cost functions
747 .  lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
748 -  mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
749 
750    Level: beginner
751 
752    Notes:
753     the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime
754 
755    After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities
756 
757 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
758 @*/
PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(lambda, 3);
  /* TS keeps only the caller's array pointers; the caller retains ownership of the vectors */
  ts->vecs_sensi  = lambda;
  ts->vecs_sensip = mu; /* mu may be NULL when no parameter sensitivities are requested */
  /* numcost must agree with a value previously registered (e.g. via TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost = numcost;
  PetscFunctionReturn(PETSC_SUCCESS);
}
770 
771 /*@
772    TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`
773 
774    Not Collective, but the vectors returned are parallel if `TS` is parallel
775 
776    Input Parameter:
777 .  ts - the `TS` context obtained from `TSCreate()`
778 
779    Output Parameters:
780 +  numcost - size of returned arrays
781 .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
782 -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters
783 
784    Level: intermediate
785 
786 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
787 @*/
PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* every output is optional: pass NULL for any result you do not need */
  if (numcost) *numcost = ts->numcost;
  if (lambda) *lambda = ts->vecs_sensi;
  if (mu) *mu = ts->vecs_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}
797 
798 /*@
799    TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
800    for use by the `TS` adjoint routines.
801 
802    Logically Collective
803 
804    Input Parameters:
805 +  ts - the `TS` context obtained from `TSCreate()`
806 .  numcost - number of cost functions
807 .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
808 .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
809 -  dir - the direction vector that are multiplied with the Hessian of the cost functions
810 
811    Level: beginner
812 
813    Notes:
814    Hessian of the cost function is completely different from Hessian of the ODE/DAE system
815 
816    For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.
817 
818    After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
819 
820    Passing NULL for lambda2 disables the second-order calculation.
821 
822 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
823 @*/
PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* numcost must agree with a value previously registered (e.g. via TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost      = numcost;
  /* borrowed pointers; lambda2 == NULL disables the second-order adjoint computation */
  ts->vecs_sensi2  = lambda2;
  ts->vecs_sensi2p = mu2;
  ts->vec_dir      = dir;
  PetscFunctionReturn(PETSC_SUCCESS);
}
835 
836 /*@
837    TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()`
838 
839    Not Collective, but vectors returned are parallel if `TS` is parallel
840 
841    Input Parameter:
842 .  ts - the `TS` context obtained from `TSCreate()`
843 
844    Output Parameters:
845 +  numcost - number of cost functions
846 .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
847 .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
848 -  dir - the direction vector that are multiplied with the Hessian of the cost functions
849 
850    Level: intermediate
851 
852 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
853 @*/
PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* every output is optional: pass NULL for any result you do not need */
  if (numcost) *numcost = ts->numcost;
  if (lambda2) *lambda2 = ts->vecs_sensi2;
  if (mu2) *mu2 = ts->vecs_sensi2p;
  if (dir) *dir = ts->vec_dir;
  PetscFunctionReturn(PETSC_SUCCESS);
}
864 
865 /*@
866   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities
867 
868   Logically Collective
869 
870   Input Parameters:
871 +  ts - the `TS` context obtained from `TSCreate()`
872 -  didp - the derivative of initial values w.r.t. parameters
873 
874   Level: intermediate
875 
876   Notes:
  When computing sensitivities w.r.t. the initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically.
878   `TS` adjoint does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver.
879 
880 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
881 @*/
PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
{
  Mat          A;    /* single-column dense matrix holding the initial tangent linear variable */
  Vec          sp;   /* work vector aliased onto column 0 of A */
  PetscScalar *xarr; /* raw storage of column 0 of A */
  PetscInt     lsize;

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

  /* fill column 0 of A in place: sp borrows the column's memory via VecPlaceArray() */
  PetscCall(VecDuplicate(ts->vec_sol, &sp));
  PetscCall(MatDenseGetColumn(A, 0, &xarr));
  PetscCall(VecPlaceArray(sp, xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      PetscCall(MatMult(didp, ts->vec_dir, sp));
      PetscCall(VecScale(sp, 2.));
    } else {
      PetscCall(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    PetscCall(VecCopy(ts->vec_dir, sp));
  }
  /* give the borrowed array back before releasing the column and the work vector */
  PetscCall(VecResetArray(sp));
  PetscCall(MatDenseRestoreColumn(A, &xarr));
  PetscCall(VecDestroy(&sp));

  PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

  /* drop our reference; presumably TSForwardSetInitialSensitivities() retains its own — confirm */
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
919 
920 /*@
921   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context
922 
923   Logically Collective
924 
925   Input Parameters:
926 .  ts - the `TS` context obtained from `TSCreate()`
927 
928   Level: intermediate
929 
930 .seealso: [](chapter_ts), `TSAdjointSetForward()`
931 @*/
PetscErrorCode TSAdjointResetForward(TS ts)
{
  PetscFunctionBegin;
  ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
  /* discard the tangent linear (forward sensitivity) state set up by TSAdjointSetForward() */
  PetscCall(TSForwardReset(ts));
  PetscFunctionReturn(PETSC_SUCCESS);
}
939 
940 /*@
941    TSAdjointSetUp - Sets up the internal data structures for the later use
942    of an adjoint solver
943 
944    Collective
945 
946    Input Parameter:
947 .  ts - the `TS` context obtained from `TSCreate()`
948 
949    Level: advanced
950 
951 .seealso: [](chapter_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
952 @*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory tj;
  PetscBool    match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); /* idempotent: set up at most once */
  /* the first-order adjoint needs lambda; parameter sensitivities additionally need a JacobianP */
  PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  /* the Basic trajectory type must not run in solution-only mode for the adjoint replay */
  PetscCall(TSGetTrajectory(ts, &tj));
  PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
  if (match) {
    PetscBool solution_only;
    PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
    PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    /* work vectors for single columns of dR/du and dR/dp, shaped like the sensitivity vectors */
    PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
  }

  PetscTryTypeMethod(ts, adjointsetup); /* optional implementation-specific setup */
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
981 
982 /*@
983   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.
984 
985    Collective
986 
987    Input Parameter:
988 .  ts - the `TS` context obtained from `TSCreate()`
989 
990    Level: beginner
991 
.seealso: [](chapter_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
993 @*/
PetscErrorCode TSAdjointReset(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscTryTypeMethod(ts, adjointreset); /* optional implementation-specific cleanup */
  if (ts->quadraturets) { /* if there is integral in the cost function */
    PetscCall(VecDestroy(&ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
  }
  /* drop the borrowed sensitivity pointers; the vectors themselves belong to the caller */
  ts->vecs_sensi         = NULL;
  ts->vecs_sensip        = NULL;
  ts->vecs_sensi2        = NULL;
  ts->vecs_sensi2p       = NULL;
  ts->vec_dir            = NULL;
  ts->adjointsetupcalled = PETSC_FALSE; /* allow TSAdjointSetUp() to run again */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1011 
1012 /*@
1013    TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time
1014 
1015    Logically Collective
1016 
1017    Input Parameters:
1018 +  ts - the `TS` context obtained from `TSCreate()`
1019 -  steps - number of steps to use
1020 
1021    Level: intermediate
1022 
1023    Notes:
1024     Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1025           so as to integrate back to less than the original timestep
1026 
1027 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1028 @*/
PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidLogicalCollectiveInt(ts, steps, 2);
  /* the adjoint can only walk back over steps the forward solve actually took */
  PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
  PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1039 
1040 /*@C
1041   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`
1042 
1043   Level: deprecated
1044 
1045 @*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* take our own reference before releasing any previously stored matrix;
       NOTE: this deprecated routine stores into ts->Jacp, whereas TSSetRHSJacobianP() uses ts->Jacprhs */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1061 
1062 /*@C
1063   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`
1064 
1065   Level: deprecated
1066 
1067 @*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidPointer(Amat, 4);

  /* invoke the user JacobianP callback registered via TSSetRHSJacobianP()/TSAdjointSetRHSJacobian();
     no NULL check here — a callback must have been registered before calling this */
  PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1078 
1079 /*@
1080   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
1081 
1082   Level: deprecated
1083 
1084 @*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* invoke the user dR/du callback; a callback must have been registered before calling this */
  PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1094 
1095 /*@
1096   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
1097 
1098   Level: deprecated
1099 
1100 @*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* invoke the user dR/dp callback; a callback must have been registered before calling this */
  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1110 
1111 /*@C
1112    TSAdjointMonitorSensi - monitors the first lambda sensitivity
1113 
1114    Level: intermediate
1115 
1116 .seealso: [](chapter_ts), `TSAdjointMonitorSet()`
1117 @*/
1118 PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1119 {
1120   PetscViewer viewer = vf->viewer;
1121 
1122   PetscFunctionBegin;
1123   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
1124   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1125   PetscCall(VecView(lambda[0], viewer));
1126   PetscCall(PetscViewerPopFormat(viewer));
1127   PetscFunctionReturn(PETSC_SUCCESS);
1128 }
1129 
1130 /*@C
1131    TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
1132 
1133    Collective
1134 
1135    Input Parameters:
1136 +  ts - `TS` object you wish to monitor
1137 .  name - the monitor type one is seeking
1138 .  help - message indicating what monitoring is done
1139 .  manual - manual page for the monitor
1140 .  monitor - the monitor function
1141 -  monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects
1142 
1143    Level: developer
1144 
1145 .seealso: [](chapter_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1146           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1147           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`,
1148           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1149           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1150           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1151           `PetscOptionsFList()`, `PetscOptionsEList()`
1152 @*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  /* look up <prefix><name> in the options database and create the requested viewer */
  PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
    PetscCall(PetscObjectDereference((PetscObject)viewer)); /* vf now holds the needed reference */
    if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
    /* vf is freed by PetscViewerAndFormatDestroy when the monitor is cancelled/destroyed */
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1170 
1171 /*@C
1172    TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1173    timestep to display the iteration's  progress.
1174 
1175    Logically Collective
1176 
1177    Input Parameters:
1178 +  ts - the `TS` context obtained from `TSCreate()`
1179 .  adjointmonitor - monitoring routine
1180 .  adjointmctx - [optional] user-defined context for private data for the
1181              monitor routine (use NULL if no context is desired)
1182 -  adjointmonitordestroy - [optional] routine that frees monitor context
1183           (may be NULL)
1184 
1185    Calling sequence of monitor:
1186 $    int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx)
1187 
1188 +    ts - the `TS` context
1189 .    steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
1190                                been interpolated to)
1191 .    time - current time
1192 .    u - current iterate
.    numcost - number of cost functions
1194 .    lambda - sensitivities to initial conditions
1195 .    mu - sensitivities to parameters
1196 -    adjointmctx - [optional] adjoint monitoring context
1197 
1198    Level: intermediate
1199 
1200    Note:
1201    This routine adds an additional monitor to the list of monitors that
1202    already has been loaded.
1203 
1204    Fortran Note:
1205    Only a single monitor function can be set for each TS object
1206 
1207 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`
1208 @*/
1209 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **))
1210 {
1211   PetscInt  i;
1212   PetscBool identical;
1213 
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1216   for (i = 0; i < ts->numbermonitors; i++) {
1217     PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1218     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1219   }
1220   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1221   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1222   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1223   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx;
1224   PetscFunctionReturn(PETSC_SUCCESS);
1225 }
1226 
1227 /*@C
1228    TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.
1229 
1230    Logically Collective
1231 
1232    Input Parameters:
1233 .  ts - the `TS` context obtained from `TSCreate()`
1234 
1235    Notes:
1236    There is no way to remove a single, specific monitor.
1237 
1238    Level: intermediate
1239 
1240 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1241 @*/
1242 PetscErrorCode TSAdjointMonitorCancel(TS ts)
1243 {
1244   PetscInt i;
1245 
1246   PetscFunctionBegin;
1247   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1248   for (i = 0; i < ts->numberadjointmonitors; i++) {
1249     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1250   }
1251   ts->numberadjointmonitors = 0;
1252   PetscFunctionReturn(PETSC_SUCCESS);
1253 }
1254 
1255 /*@C
1256    TSAdjointMonitorDefault - the default monitor of adjoint computations
1257 
1258    Level: intermediate
1259 
1260 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1261 @*/
1262 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1263 {
1264   PetscViewer viewer = vf->viewer;
1265 
1266   PetscFunctionBegin;
1267   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
1268   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1269   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1270   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)ptime, ts->steprollback ? " (r)\n" : "\n"));
1271   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1272   PetscCall(PetscViewerPopFormat(viewer));
1273   PetscFunctionReturn(PETSC_SUCCESS);
1274 }
1275 
1276 /*@C
1277    TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1278    `VecView()` for the sensitivities to initial states at each timestep
1279 
1280    Collective
1281 
1282    Input Parameters:
1283 +  ts - the `TS` context
1284 .  step - current time-step
1285 .  ptime - current time
1286 .  u - current state
1287 .  numcost - number of cost functions
1288 .  lambda - sensitivities to initial conditions
1289 .  mu - sensitivities to parameters
1290 -  dummy - either a viewer or NULL
1291 
1292    Level: intermediate
1293 
1294 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1295 @*/
1296 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy)
1297 {
1298   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1299   PetscDraw        draw;
1300   PetscReal        xl, yl, xr, yr, h;
1301   char             time[32];
1302 
1303   PetscFunctionBegin;
1304   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);
1305 
1306   PetscCall(VecView(lambda[0], ictx->viewer));
1307   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1308   PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime));
1309   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1310   h = yl + .95 * (yr - yl);
1311   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1312   PetscCall(PetscDrawFlush(draw));
1313   PetscFunctionReturn(PETSC_SUCCESS);
1314 }
1315 
1316 /*
1317    TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from user options.
1318 
1319    Collective
1320 
1321    Input Parameter:
1322 .  ts - the `TS` context
1323 
1324    Options Database Keys:
1325 +  -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory)
1326 .  -ts_adjoint_monitor - print information at each adjoint time step
1327 -  -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically
1328 
1329    Level: developer
1330 
1331    Note:
1332     This is not normally called directly by users
1333 
1334 .seealso: [](chapter_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1335 */
PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject)
{
  PetscBool tflg, opt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
  /* -ts_adjoint_solve: run the adjoint right after the forward solve; requires the trajectory */
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
  if (opt) {
    PetscCall(TSSetSaveTrajectory(ts)); /* the adjoint replays the stored forward trajectory */
    ts->adjoint_solve = tflg;
  }
  /* textual monitors */
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
  /* graphical monitor for lambda[0] */
  opt = PETSC_FALSE;
  PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1; /* default: draw every adjoint step */

    PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
    PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
    PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void **))TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1363 
1364 /*@
1365    TSAdjointStep - Steps one time step backward in the adjoint run
1366 
1367    Collective
1368 
1369    Input Parameter:
1370 .  ts - the `TS` context obtained from `TSCreate()`
1371 
1372    Level: intermediate
1373 
1374 .seealso: [](chapter_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1375 @*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* NOTE(review): dm is not used below; presumably TSGetDM() is called for its side effects — confirm */
  PetscCall(TSGetDM(ts, &dm));
  PetscCall(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason     = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
  PetscUseTypeMethod(ts, adjointstep); /* implementation-provided backward step (required) */
  PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    /* negative reasons are failures; error out unless the user chose to tolerate failed steps */
    PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* stop once the requested number of backward steps has been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1400 
1401 /*@
   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

   Collective

1406    Input Parameter:
1407 .  ts - the `TS` context obtained from `TSCreate()`
1408 
1409    Options Database Key:
1410 . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values
1411 
1412    Level: intermediate
1413 
1414    Notes:
1415    This must be called after a call to `TSSolve()` that solves the forward problem
1416 
1417    By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time
1418 
1419 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1420 @*/
PetscErrorCode TSAdjointSolve(TS ts)
{
  static PetscBool cite = PETSC_FALSE; /* register the TSAdjoint citation only once */
#if defined(TSADJOINT_STAGE)
  PetscLogStage adjoint_stage;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
                                   "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
                                   "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
                                   "  journal       = {SIAM Journal on Scientific Computing},\n"
                                   "  volume        = {44},\n"
                                   "  number        = {1},\n"
                                   "  pages         = {C1-C24},\n"
                                   "  doi           = {10.1137/21M140078X},\n"
                                   "  year          = {2022}\n}\n",
                                   &cite));
#if defined(TSADJOINT_STAGE)
  /* optional separate log stage for the adjoint solve (compile-time switch) */
  PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
  PetscCall(PetscLogStagePush(adjoint_stage));
#endif
  PetscCall(TSAdjointSetUp(ts));

  /* reset time step and iteration counters */
  ts->adjoint_steps     = 0;
  ts->ksp_its           = 0;
  ts->snes_its          = 0;
  ts->num_snes_failures = 0;
  ts->reject            = 0;
  ts->reason            = TS_CONVERGED_ITERATING;

  /* by default walk all the way back over every forward step taken */
  if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
  if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

  /* backward time loop: restore the stored forward state, monitor, handle events, step back */
  while (!ts->reason) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
    PetscCall(TSAdjointEventHandler(ts));
    PetscCall(TSAdjointStep(ts));
    if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
  }
  /* when we reached the initial step, restore/monitor the initial state as well */
  if (!ts->steps) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
  }
  ts->solvetime = ts->ptime;
  PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
  PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
  ts->adjoint_max_steps = 0; /* reset so a later TSAdjointSolve() recomputes the default */
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStagePop());
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1477 
1478 /*@C
1479    TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`
1480 
1481    Collective
1482 
1483    Input Parameters:
1484 +  ts - time stepping context obtained from `TSCreate()`
1485 .  step - step number that has just completed
1486 .  ptime - model time of the state
1487 .  u - state at the current model time
1488 .  numcost - number of cost functions (dimension of lambda  or mu)
1489 .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1490 -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters
1491 
1492    Level: developer
1493 
1494    Note:
1495    `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1496    Users would almost never call this routine directly.
1497 
1498 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1499 @*/
1500 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
1501 {
1502   PetscInt i, n = ts->numberadjointmonitors;
1503 
1504   PetscFunctionBegin;
1505   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1506   PetscValidHeaderSpecific(u, VEC_CLASSID, 4);
1507   PetscCall(VecLockReadPush(u));
1508   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1509   PetscCall(VecLockReadPop(u));
1510   PetscFunctionReturn(PETSC_SUCCESS);
1511 }
1512 
/*@
 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

 Collective

 Input Parameter:
 .  ts - time stepping context

 Level: advanced

 Notes:
 This function cannot be called until `TSAdjointStep()` has been completed.

 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointStep()`
 @*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dispatch to the implementation-provided adjoint quadrature routine; errors if the TS type does not supply one */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1535 
1536 /* ------------------ Forward (tangent linear) sensitivity  ------------------*/
1537 
1538 /*@
1539   TSForwardSetUp - Sets up the internal data structures for the later use
1540   of forward sensitivity analysis
1541 
1542   Collective
1543 
1544   Input Parameter:
1545 . ts - the `TS` context obtained from `TSCreate()`
1546 
1547   Level: advanced
1548 
1549 .seealso: [](chapter_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1550 @*/
1551 PetscErrorCode TSForwardSetUp(TS ts)
1552 {
1553   PetscFunctionBegin;
1554   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1555   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1556   PetscTryTypeMethod(ts, forwardsetup);
1557   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1558   ts->forwardsetupcalled = PETSC_TRUE;
1559   PetscFunctionReturn(PETSC_SUCCESS);
1560 }
1561 
1562 /*@
1563   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis
1564 
1565   Collective
1566 
1567   Input Parameter:
1568 . ts - the `TS` context obtained from `TSCreate()`
1569 
1570   Level: advanced
1571 
1572 .seealso: [](chapter_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1573 @*/
1574 PetscErrorCode TSForwardReset(TS ts)
1575 {
1576   TS quadts = ts->quadraturets;
1577 
1578   PetscFunctionBegin;
1579   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1580   PetscTryTypeMethod(ts, forwardreset);
1581   PetscCall(MatDestroy(&ts->mat_sensip));
1582   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1583   PetscCall(VecDestroy(&ts->vec_sensip_col));
1584   ts->forward_solve      = PETSC_FALSE;
1585   ts->forwardsetupcalled = PETSC_FALSE;
1586   PetscFunctionReturn(PETSC_SUCCESS);
1587 }
1588 
1589 /*@
1590   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.
1591 
1592   Input Parameters:
1593 + ts - the `TS` context obtained from `TSCreate()`
1594 . numfwdint - number of integrals
1595 - vp - the vectors containing the gradients for each integral w.r.t. parameters
1596 
1597   Level: deprecated
1598 
1599 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1600 @*/
1601 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp)
1602 {
1603   PetscFunctionBegin;
1604   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1605   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
1606   if (!ts->numcost) ts->numcost = numfwdint;
1607 
1608   ts->vecs_integral_sensip = vp;
1609   PetscFunctionReturn(PETSC_SUCCESS);
1610 }
1611 
/*@
  TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ numfwdint - number of integrals (optional, pass NULL if not needed)
- vp - the vectors containing the gradients for each integral w.r.t. parameters

  Level: deprecated

.seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* NOTE(review): vp is required to be non-NULL here, which makes the `if (vp)` guard below redundant — confirm whether vp was meant to be optional */
  PetscValidPointer(vp, 3);
  if (numfwdint) *numfwdint = ts->numcost;
  if (vp) *vp = ts->vecs_integral_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1634 
1635 /*@
1636   TSForwardStep - Compute the forward sensitivity for one time step.
1637 
1638   Collective
1639 
1640   Input Parameter:
1641 . ts - time stepping context
1642 
1643   Level: advanced
1644 
1645   Notes:
1646   This function cannot be called until `TSStep()` has been completed.
1647 
1648 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1649 @*/
1650 PetscErrorCode TSForwardStep(TS ts)
1651 {
1652   PetscFunctionBegin;
1653   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1654   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1655   PetscUseTypeMethod(ts, forwardstep);
1656   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1657   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1658   PetscFunctionReturn(PETSC_SUCCESS);
1659 }
1660 
1661 /*@
1662   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.
1663 
1664   Logically Collective
1665 
1666   Input Parameters:
1667 + ts - the `TS` context obtained from `TSCreate()`
1668 . nump - number of parameters
1669 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1670 
1671   Level: beginner
1672 
1673   Notes:
1674   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1675   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1676   You must call this function before `TSSolve()`.
1677   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.
1678 
1679 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1680 @*/
1681 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1682 {
1683   PetscFunctionBegin;
1684   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1685   PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3);
1686   ts->forward_solve = PETSC_TRUE;
1687   if (nump == PETSC_DEFAULT) {
1688     PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1689   } else ts->num_parameters = nump;
1690   PetscCall(PetscObjectReference((PetscObject)Smat));
1691   PetscCall(MatDestroy(&ts->mat_sensip));
1692   ts->mat_sensip = Smat;
1693   PetscFunctionReturn(PETSC_SUCCESS);
1694 }
1695 
/*@
  TSForwardGetSensitivities - Returns the trajectory sensitivities

  Not Collective, but Smat returned is parallel if ts is parallel

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ nump - number of parameters (optional, pass NULL if not needed)
- Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters (optional, pass NULL if not needed)

  Level: intermediate

.seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (nump) *nump = ts->num_parameters;
  if (Smat) *Smat = ts->mat_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1718 
/*@
   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

   Collective

   Input Parameter:
.  ts - time stepping context

   Level: advanced

   Note:
   This function cannot be called until `TSStep()` has been completed.

.seealso: [](chapter_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dispatch to the implementation-provided forward quadrature routine; errors if the TS type does not supply one */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1741 
/*@
  TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

  Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
- didp - parametric sensitivities of the initial condition

  Level: intermediate

  Notes:
  `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
   This function is used to set initial values for tangent linear variables.

.seealso: [](chapter_ts), `TS`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(didp, MAT_CLASSID, 2);
  /* only installs didp when no sensitivity matrix has been set yet; a previously set matrix is kept untouched */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1767 
1768 /*@
1769    TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages
1770 
1771    Input Parameter:
1772 .  ts - the `TS` context obtained from `TSCreate()`
1773 
1774    Output Parameters:
1775 +  ns - number of stages
1776 -  S - tangent linear sensitivities at the intermediate stages
1777 
1778    Level: advanced
1779 
1780 .seealso: `TS`
1781 @*/
1782 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1783 {
1784   PetscFunctionBegin;
1785   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1786 
1787   if (!ts->ops->getstages) *S = NULL;
1788   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1789   PetscFunctionReturn(PETSC_SUCCESS);
1790 }
1791 
1792 /*@
1793    TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time
1794 
1795    Input Parameters:
1796 +  ts - the `TS` context obtained from `TSCreate()`
1797 -  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1798 
1799    Output Parameters:
1800 .  quadts - the child `TS` context
1801 
1802    Level: intermediate
1803 
1804 .seealso: [](chapter_ts), `TSGetQuadratureTS()`
1805 @*/
1806 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1807 {
1808   char prefix[128];
1809 
1810   PetscFunctionBegin;
1811   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1812   PetscValidPointer(quadts, 3);
1813   PetscCall(TSDestroy(&ts->quadraturets));
1814   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1815   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1816   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1817   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1818   *quadts = ts->quadraturets;
1819 
1820   if (ts->numcost) {
1821     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1822   } else {
1823     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1824   }
1825   ts->costintegralfwd = fwd;
1826   PetscFunctionReturn(PETSC_SUCCESS);
1827 }
1828 
/*@
   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

   Input Parameter:
.  ts - the `TS` context obtained from `TSCreate()`

   Output Parameters:
+  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run (optional, pass NULL if not needed)
-  quadts - the child `TS` context (optional, pass NULL if not needed)

   Level: intermediate

.seealso: [](chapter_ts), `TSCreateQuadratureTS()`
@*/
PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (fwd) *fwd = ts->costintegralfwd;
  if (quadts) *quadts = ts->quadraturets;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1851 
1852 /*@
1853    TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`
1854 
1855    Collective
1856 
1857    Input Parameters:
1858 +  ts - the `TS` context obtained from `TSCreate()`
1859 -  x - state vector
1860 
1861    Output Parameters:
1862 +  J - Jacobian matrix
1863 -  Jpre - preconditioning matrix for J (may be same as J)
1864 
1865    Level: developer
1866 
1867    Note:
1868    Uses finite differencing when `TS` Jacobian is not available.
1869 
1870 .seealso: `SNES`, `TS`, `SNESSetJacobian()`, TSSetRHSJacobian()`, `TSSetIJacobian()`
1871 @*/
1872 PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
1873 {
1874   SNES snes                                          = ts->snes;
1875   PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;
1876 
1877   PetscFunctionBegin;
1878   /*
1879     Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
1880     because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
1881     explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
1882     coloring is used.
1883   */
1884   PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
1885   if (jac == SNESComputeJacobianDefaultColor) {
1886     Vec f;
1887     PetscCall(SNESSetSolution(snes, x));
1888     PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
1889     /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
1890     PetscCall(SNESComputeFunction(snes, x, f));
1891   }
1892   PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
1893   PetscFunctionReturn(PETSC_SUCCESS);
1894 }
1895