xref: /petsc/src/ts/interface/sensitivity/tssen.c (revision 2ff79c18c26c94ed8cb599682f680f231dca6444)
1 #include <petsc/private/tsimpl.h> /*I <petscts.h>  I*/
2 #include <petscdraw.h>
3 
4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;
5 
6 /* #define TSADJOINT_STAGE */
7 
8 /* ------------------------ Sensitivity Context ---------------------------*/
9 
10 /*@C
11   TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
12 
13   Logically Collective
14 
15   Input Parameters:
16 + ts   - `TS` context obtained from `TSCreate()`
17 . Amat - JacobianP matrix
18 . func - function
19 - ctx  - [optional] user-defined function context
20 
21   Level: intermediate
22 
23   Note:
24   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
25 
26 .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
27 @*/
28 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, void *ctx)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
32   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
33 
34   ts->rhsjacobianp    = func;
35   ts->rhsjacobianpctx = ctx;
36   if (Amat) {
37     PetscCall(PetscObjectReference((PetscObject)Amat));
38     PetscCall(MatDestroy(&ts->Jacprhs));
39     ts->Jacprhs = Amat;
40   }
41   PetscFunctionReturn(PETSC_SUCCESS);
42 }
43 
44 /*@C
  TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
46 
47   Logically Collective
48 
49   Input Parameter:
50 . ts - `TS` context obtained from `TSCreate()`
51 
52   Output Parameters:
53 + Amat - JacobianP matrix
54 . func - function
55 - ctx  - [optional] user-defined function context
56 
57   Level: intermediate
58 
59   Note:
60   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
61 
62 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
63 @*/
64 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, void **ctx)
65 {
66   PetscFunctionBegin;
67   if (func) *func = ts->rhsjacobianp;
68   if (ctx) *ctx = ts->rhsjacobianpctx;
69   if (Amat) *Amat = ts->Jacprhs;
70   PetscFunctionReturn(PETSC_SUCCESS);
71 }
72 
73 /*@C
74   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.
75 
76   Collective
77 
78   Input Parameters:
79 + ts - The `TS` context obtained from `TSCreate()`
80 . t  - the time
81 - U  - the solution at which to compute the Jacobian
82 
83   Output Parameter:
84 . Amat - the computed Jacobian
85 
86   Level: developer
87 
88 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
89 @*/
90 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
91 {
92   PetscFunctionBegin;
93   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
94   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
95   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
96 
97   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
98   else {
99     PetscBool assembled;
100     PetscCall(MatZeroEntries(Amat));
101     PetscCall(MatAssembled(Amat, &assembled));
102     if (!assembled) {
103       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105     }
106   }
107   PetscFunctionReturn(PETSC_SUCCESS);
108 }
109 
110 /*@C
111   TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
112 
113   Logically Collective
114 
115   Input Parameters:
116 + ts   - `TS` context obtained from `TSCreate()`
117 . Amat - JacobianP matrix
118 . func - function
119 - ctx  - [optional] user-defined function context
120 
121   Calling sequence of `func`:
122 + ts    - the `TS` context
123 . t     - current timestep
124 . U     - input vector (current ODE solution)
125 . Udot  - time derivative of state vector
126 . shift - shift to apply, see the note in `TSSetIJacobian()`
127 . A     - output matrix
128 - ctx   - [optional] user-defined function context
129 
130   Level: intermediate
131 
132   Note:
133   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
134 
135 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136 @*/
137 PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx)
138 {
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
141   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
142 
143   ts->ijacobianp    = func;
144   ts->ijacobianpctx = ctx;
145   if (Amat) {
146     PetscCall(PetscObjectReference((PetscObject)Amat));
147     PetscCall(MatDestroy(&ts->Jacp));
148     ts->Jacp = Amat;
149   }
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 /*@C
  TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
155 
156   Logically Collective
157 
158   Input Parameter:
159 . ts - `TS` context obtained from `TSCreate()`
160 
161   Output Parameters:
162 + Amat - JacobianP matrix
163 . func - the function that computes the JacobianP
164 - ctx  - [optional] user-defined function context
165 
166   Calling sequence of `func`:
167 + ts    - the `TS` context
168 . t     - current timestep
169 . U     - input vector (current ODE solution)
170 . Udot  - time derivative of state vector
171 . shift - shift to apply, see the note in `TSSetIJacobian()`
172 . A     - output matrix
173 - ctx   - [optional] user-defined function context
174 
175   Level: intermediate
176 
177   Note:
178   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
179 
180 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181 @*/
182 PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void **ctx)
183 {
184   PetscFunctionBegin;
185   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
186 
187   if (func) *func = ts->ijacobianp;
188   if (ctx) *ctx = ts->ijacobianpctx;
189   if (Amat) *Amat = ts->Jacp;
190   PetscFunctionReturn(PETSC_SUCCESS);
191 }
192 
193 /*@
194   TSComputeIJacobianP - Runs the user-defined IJacobianP function.
195 
196   Collective
197 
198   Input Parameters:
199 + ts    - the `TS` context
200 . t     - current timestep
201 . U     - state vector
202 . Udot  - time derivative of state vector
203 . shift - shift to apply, see note below
204 - imex  - flag indicates if the method is IMEX so that the `RHSJacobianP` should be kept separate
205 
206   Output Parameter:
207 . Amat - Jacobian matrix
208 
209   Level: developer
210 
211 .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212 @*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); /* no output matrix requested, nothing to do */
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  /* User-supplied IJacobianP callback fills Amat with dF/dp */
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    /* IMEX: keep the RHS contribution separate; here only the implicit part is produced */
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        /* ensure Amat is in an assembled state before it is used downstream */
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* Fully implicit view: fold the RHS JacobianP into Amat with a negative sign,
       since moving G to the left-hand side of F(Udot,U,p,t) = G(U,p,t) negates it */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
248 
249 /*@C
250   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions
251 
252   Logically Collective
253 
254   Input Parameters:
255 + ts           - the `TS` context obtained from `TSCreate()`
256 . numcost      - number of gradients to be computed, this is the number of cost functions
257 . costintegral - vector that stores the integral values
258 . rf           - routine for evaluating the integrand function
259 . drduf        - function that computes the gradients of the r with respect to u
260 . drdpf        - function that computes the gradients of the r with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261 . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262 - ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)
263 
264   Calling sequence of `rf`:
265 + ts  - the integrator
266 . t   - the time
267 . U   - the solution
268 . F   - the computed value of the function
269 - ctx - the user context
270 
271   Calling sequence of `drduf`:
272 + ts   - the integrator
273 . t    - the time
274 . U    - the solution
275 . dRdU - the computed gradients of the r with respect to u
276 - ctx  - the user context
277 
278   Calling sequence of `drdpf`:
279 + ts   - the integrator
280 . t    - the time
281 . U    - the solution
282 . dRdP - the computed gradients of the r with respect to p
283 - ctx  - the user context
284 
285   Level: deprecated
286 
287   Notes:
288   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions
289 
290   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead
291 
292 .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294 @*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  /* numcost must agree with any value previously fixed by the gradient-setting routines */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* adopt the user's vector; reference before destroy so re-setting the same vector is safe */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  /* work vector for the integrand values, same layout as the integral vector */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}
326 
327 /*@
328   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329   It is valid to call the routine after a backward run.
330 
331   Not Collective
332 
333   Input Parameter:
334 . ts - the `TS` context obtained from `TSCreate()`
335 
336   Output Parameter:
337 . v - the vector containing the integrals for each cost function
338 
339   Level: intermediate
340 
341 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342 @*/
343 PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344 {
345   TS quadts;
346 
347   PetscFunctionBegin;
348   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
349   PetscAssertPointer(v, 2);
350   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351   *v = quadts->vec_sol;
352   PetscFunctionReturn(PETSC_SUCCESS);
353 }
354 
355 /*@
356   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.
357 
358   Input Parameters:
359 + ts - the `TS` context
360 . t  - current time
361 - U  - state vector, i.e. current solution
362 
363   Output Parameter:
364 . Q - vector of size numcost to hold the outputs
365 
366   Level: deprecated
367 
368   Note:
369   Most users should not need to explicitly call this routine, as it
370   is used internally within the sensitivity analysis context.
371 
372 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373 @*/
374 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375 {
376   PetscFunctionBegin;
377   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
378   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
379   PetscValidHeaderSpecific(Q, VEC_CLASSID, 4);
380 
381   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383   else PetscCall(VecZeroEntries(Q));
384   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385   PetscFunctionReturn(PETSC_SUCCESS);
386 }
387 
388 // PetscClangLinter pragma disable: -fdoc-*
389 /*@
390   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
391 
392   Level: deprecated
393 
394 @*/
395 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
396 {
397   PetscFunctionBegin;
398   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);
399   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
400   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
401 
402   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
403   PetscFunctionReturn(PETSC_SUCCESS);
404 }
405 
406 // PetscClangLinter pragma disable: -fdoc-*
407 /*@
408   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
409 
410   Level: deprecated
411 
412 @*/
413 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
414 {
415   PetscFunctionBegin;
416   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);
417   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
418   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
419 
420   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
421   PetscFunctionReturn(PETSC_SUCCESS);
422 }
423 
424 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426 /*@C
427   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.
428 
429   Logically Collective
430 
431   Input Parameters:
432 + ts   - `TS` context obtained from `TSCreate()`
433 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
434 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
435 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
436 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
437 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
438 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
439 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
440 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP
441 
442   Calling sequence of `ihessianproductfunc1`:
+ ts  - the `TS` context
. t   - current timestep
445 . U   - input vector (current ODE solution)
446 . Vl  - an array of input vectors to be left-multiplied with the Hessian
447 . Vr  - input vector to be right-multiplied with the Hessian
448 . VHV - an array of output vectors for vector-Hessian-vector product
449 - ctx - [optional] user-defined function context
450 
451   Level: intermediate
452 
453   Notes:
  All other functions have the same calling sequence as `ihessianproductfunc1`, so their
455   descriptions are omitted for brevity.
456 
457   The first Hessian function and the working array are required.
458   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
459   $ Vl_n^T*F_UP*Vr
460   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
461   Each entry of F_UP corresponds to the derivative
462   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
463   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
464   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
465   If the cost function is a scalar, there will be only one vector in Vl and VHV.
466 
467 .seealso: [](ch_ts), `TS`
468 @*/
469 PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
470 {
471   PetscFunctionBegin;
472   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
473   PetscAssertPointer(ihp1, 2);
474 
475   ts->ihessianproductctx = ctx;
476   if (ihp1) ts->vecs_fuu = ihp1;
477   if (ihp2) ts->vecs_fup = ihp2;
478   if (ihp3) ts->vecs_fpu = ihp3;
479   if (ihp4) ts->vecs_fpp = ihp4;
480   ts->ihessianproduct_fuu = ihessianproductfunc1;
481   ts->ihessianproduct_fup = ihessianproductfunc2;
482   ts->ihessianproduct_fpu = ihessianproductfunc3;
483   ts->ihessianproduct_fpp = ihessianproductfunc4;
484   PetscFunctionReturn(PETSC_SUCCESS);
485 }
486 
487 /*@
488   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.
489 
490   Collective
491 
492   Input Parameters:
493 + ts - The `TS` context obtained from `TSCreate()`
494 . t  - the time
495 . U  - the solution at which to compute the Hessian product
496 . Vl - the array of input vectors to be multiplied with the Hessian from the left
497 - Vr - the input vector to be multiplied with the Hessian from the right
498 
499   Output Parameter:
500 . VHV - the array of output vectors that store the Hessian product
501 
502   Level: developer
503 
504   Note:
505   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
506   so most users would not generally call this routine themselves.
507 
508 .seealso: [](ch_ts), `TSSetIHessianProduct()`
509 @*/
510 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
511 {
512   PetscFunctionBegin;
513   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
514   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
515   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
516 
517   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
518 
519   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
520   if (ts->rhshessianproduct_guu) {
521     PetscInt nadj;
522     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
523     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
524   }
525   PetscFunctionReturn(PETSC_SUCCESS);
526 }
527 
528 /*@
529   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.
530 
531   Collective
532 
533   Input Parameters:
534 + ts - The `TS` context obtained from `TSCreate()`
535 . t  - the time
536 . U  - the solution at which to compute the Hessian product
537 . Vl - the array of input vectors to be multiplied with the Hessian from the left
538 - Vr - the input vector to be multiplied with the Hessian from the right
539 
540   Output Parameter:
541 . VHV - the array of output vectors that store the Hessian product
542 
543   Level: developer
544 
545   Note:
546   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
547   so most users would not generally call this routine themselves.
548 
549 .seealso: [](ch_ts), `TSSetIHessianProduct()`
550 @*/
551 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
552 {
553   PetscFunctionBegin;
554   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
555   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
556   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
557 
558   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
559 
560   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
561   if (ts->rhshessianproduct_gup) {
562     PetscInt nadj;
563     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
564     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
565   }
566   PetscFunctionReturn(PETSC_SUCCESS);
567 }
568 
569 /*@
570   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.
571 
572   Collective
573 
574   Input Parameters:
575 + ts - The `TS` context obtained from `TSCreate()`
576 . t  - the time
577 . U  - the solution at which to compute the Hessian product
578 . Vl - the array of input vectors to be multiplied with the Hessian from the left
579 - Vr - the input vector to be multiplied with the Hessian from the right
580 
581   Output Parameter:
582 . VHV - the array of output vectors that store the Hessian product
583 
584   Level: developer
585 
586   Note:
587   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
588   so most users would not generally call this routine themselves.
589 
590 .seealso: [](ch_ts), `TSSetIHessianProduct()`
591 @*/
592 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
593 {
594   PetscFunctionBegin;
595   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
596   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
597   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
598 
599   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
600 
601   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
602   if (ts->rhshessianproduct_gpu) {
603     PetscInt nadj;
604     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
605     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
606   }
607   PetscFunctionReturn(PETSC_SUCCESS);
608 }
609 
610 /*@C
611   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.
612 
613   Collective
614 
615   Input Parameters:
616 + ts - The `TS` context obtained from `TSCreate()`
617 . t  - the time
618 . U  - the solution at which to compute the Hessian product
619 . Vl - the array of input vectors to be multiplied with the Hessian from the left
620 - Vr - the input vector to be multiplied with the Hessian from the right
621 
622   Output Parameter:
623 . VHV - the array of output vectors that store the Hessian product
624 
625   Level: developer
626 
627   Note:
628   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
629   so most users would not generally call this routine themselves.
630 
631 .seealso: [](ch_ts), `TSSetIHessianProduct()`
632 @*/
633 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
634 {
635   PetscFunctionBegin;
636   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
637   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
638   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
639 
640   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
641 
642   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
643   if (ts->rhshessianproduct_gpp) {
644     PetscInt nadj;
645     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
646     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
647   }
648   PetscFunctionReturn(PETSC_SUCCESS);
649 }
650 
651 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
652 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
653 /*@C
654   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
655   product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
656   variable.
657 
658   Logically Collective
659 
660   Input Parameters:
661 + ts     - `TS` context obtained from `TSCreate()`
662 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
663 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
664 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
665 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
666 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
667 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
668 . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
669 . hessianproductfunc4 - vector-Hessian-vector product function for G_PP
670 - ctx    - [optional] user-defined function context
671 
672   Calling sequence of `rhshessianproductfunc1`:
673 + ts  - the `TS` context
674 . t   - current timestep
675 . U   - input vector (current ODE solution)
676 . Vl  - an array of input vectors to be left-multiplied with the Hessian
677 . Vr  - input vector to be right-multiplied with the Hessian
678 . VHV - an array of output vectors for vector-Hessian-vector product
679 - ctx - [optional] user-defined function context
680 
681   Level: intermediate
682 
683   Notes:
  All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
685   descriptions are omitted for brevity.
686 
687   The first Hessian function and the working array are required.
688 
689   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
690   $ Vl_n^T*G_UP*Vr
691   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
692   Each entry of G_UP corresponds to the derivative
693   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
694   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
695   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
696   If the cost function is a scalar, there will be only one vector in Vl and VHV.
697 
698 .seealso: `TS`, `TSAdjoint`
699 @*/
700 PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec rhshp1[], PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec rhshp2[], PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp3[], PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp4[], PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
701 {
702   PetscFunctionBegin;
703   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
704   PetscAssertPointer(rhshp1, 2);
705 
706   ts->rhshessianproductctx = ctx;
707   if (rhshp1) ts->vecs_guu = rhshp1;
708   if (rhshp2) ts->vecs_gup = rhshp2;
709   if (rhshp3) ts->vecs_gpu = rhshp3;
710   if (rhshp4) ts->vecs_gpp = rhshp4;
711   ts->rhshessianproduct_guu = rhshessianproductfunc1;
712   ts->rhshessianproduct_gup = rhshessianproductfunc2;
713   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
714   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
715   PetscFunctionReturn(PETSC_SUCCESS);
716 }
717 
718 /*@
719   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.
720 
721   Collective
722 
723   Input Parameters:
724 + ts - The `TS` context obtained from `TSCreate()`
725 . t  - the time
726 . U  - the solution at which to compute the Hessian product
727 . Vl - the array of input vectors to be multiplied with the Hessian from the left
728 - Vr - the input vector to be multiplied with the Hessian from the right
729 
730   Output Parameter:
731 . VHV - the array of output vectors that store the Hessian product
732 
733   Level: developer
734 
735   Note:
736   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
737   so most users would not generally call this routine themselves.
738 
739 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
740 @*/
741 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
742 {
743   PetscFunctionBegin;
744   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
745   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
746   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
747 
748   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
749   PetscFunctionReturn(PETSC_SUCCESS);
750 }
751 
752 /*@
753   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.
754 
755   Collective
756 
757   Input Parameters:
758 + ts - The `TS` context obtained from `TSCreate()`
759 . t  - the time
760 . U  - the solution at which to compute the Hessian product
761 . Vl - the array of input vectors to be multiplied with the Hessian from the left
762 - Vr - the input vector to be multiplied with the Hessian from the right
763 
764   Output Parameter:
765 . VHV - the array of output vectors that store the Hessian product
766 
767   Level: developer
768 
769   Note:
770   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
771   so most users would not generally call this routine themselves.
772 
773 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
774 @*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
{
  PetscFunctionBegin;
  /* No output vectors requested: nothing to compute, and skip validation so a NULL call is cheap */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Dispatch to the user callback registered with TSSetRHSHessianProduct() for the G_up block */
  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
785 
786 /*@
787   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.
788 
789   Collective
790 
791   Input Parameters:
792 + ts - The `TS` context obtained from `TSCreate()`
793 . t  - the time
794 . U  - the solution at which to compute the Hessian product
795 . Vl - the array of input vectors to be multiplied with the Hessian from the left
796 - Vr - the input vector to be multiplied with the Hessian from the right
797 
798   Output Parameter:
799 . VHV - the array of output vectors that store the Hessian product
800 
801   Level: developer
802 
803   Note:
804   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
805   so most users would not generally call this routine themselves.
806 
807 .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
808 @*/
PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
{
  PetscFunctionBegin;
  /* No output vectors requested: nothing to compute, and skip validation so a NULL call is cheap */
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Dispatch to the user callback registered with TSSetRHSHessianProduct() for the G_pu block */
  PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
819 
820 /*@
821   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.
822 
823   Collective
824 
825   Input Parameters:
826 + ts - The `TS` context obtained from `TSCreate()`
827 . t  - the time
828 . U  - the solution at which to compute the Hessian product
829 . Vl - the array of input vectors to be multiplied with the Hessian from the left
830 - Vr - the input vector to be multiplied with the Hessian from the right
831 
832   Output Parameter:
833 . VHV - the array of output vectors that store the Hessian product
834 
835   Level: developer
836 
837   Note:
838   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
839   so most users would not generally call this routine themselves.
840 
841 .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
842 @*/
843 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
844 {
845   PetscFunctionBegin;
846   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
847   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
848   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
849 
850   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
851   PetscFunctionReturn(PETSC_SUCCESS);
852 }
853 
854 /* --------------------------- Adjoint sensitivity ---------------------------*/
855 
856 /*@
857   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
858   for use by the `TS` adjoint routines.
859 
860   Logically Collective
861 
862   Input Parameters:
863 + ts      - the `TS` context obtained from `TSCreate()`
864 . numcost - number of gradients to be computed, this is the number of cost functions
865 . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
866 - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
867 
868   Level: beginner
869 
870   Notes:
871   the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime
872 
873   After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities
874 
875 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
876 @*/
PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec lambda[], Vec mu[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscAssertPointer(lambda, 3);
  /* The TS keeps references to the caller-owned arrays (no copy); TSAdjointSolve()
     reads the initial values from them and overwrites them with the sensitivities.
     mu may be NULL when no parameter sensitivities are requested. */
  ts->vecs_sensi  = lambda;
  ts->vecs_sensip = mu;
  /* numcost may already be fixed (e.g. by TSSetCostIntegrand()); it must agree */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost = numcost;
  PetscFunctionReturn(PETSC_SUCCESS);
}
888 
889 /*@
890   TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`
891 
892   Not Collective, but the vectors returned are parallel if `TS` is parallel
893 
894   Input Parameter:
895 . ts - the `TS` context obtained from `TSCreate()`
896 
897   Output Parameters:
898 + numcost - size of returned arrays
899 . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
900 - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters
901 
902   Level: intermediate
903 
904 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
905 @*/
906 PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec *lambda[], Vec *mu[])
907 {
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
910   if (numcost) *numcost = ts->numcost;
911   if (lambda) *lambda = ts->vecs_sensi;
912   if (mu) *mu = ts->vecs_sensip;
913   PetscFunctionReturn(PETSC_SUCCESS);
914 }
915 
916 /*@
917   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
918   for use by the `TS` adjoint routines.
919 
920   Logically Collective
921 
922   Input Parameters:
923 + ts      - the `TS` context obtained from `TSCreate()`
924 . numcost - number of cost functions
925 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
926 . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
927 - dir     - the direction vector that are multiplied with the Hessian of the cost functions
928 
929   Level: beginner
930 
931   Notes:
932   Hessian of the cost function is completely different from Hessian of the ODE/DAE system
933 
934   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.
935 
936   After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
937 
938   Passing `NULL` for `lambda2` disables the second-order calculation.
939 
940 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
941 @*/
PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec lambda2[], Vec mu2[], Vec dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* numcost may already be fixed (e.g. by TSSetCostIntegrand()); it must agree */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  /* Store references to the caller-owned second-order adjoint work vectors (no copy);
     TSAdjointSolve() overwrites lambda2/mu2 with the Hessian-vector products.
     Passing lambda2 == NULL disables the second-order calculation. */
  ts->numcost      = numcost;
  ts->vecs_sensi2  = lambda2;
  ts->vecs_sensi2p = mu2;
  ts->vec_dir      = dir;
  PetscFunctionReturn(PETSC_SUCCESS);
}
953 
954 /*@
955   TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()`
956 
957   Not Collective, but vectors returned are parallel if `TS` is parallel
958 
959   Input Parameter:
960 . ts - the `TS` context obtained from `TSCreate()`
961 
962   Output Parameters:
963 + numcost - number of cost functions
964 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
965 . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
966 - dir     - the direction vector that are multiplied with the Hessian of the cost functions
967 
968   Level: intermediate
969 
970 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
971 @*/
972 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec *lambda2[], Vec *mu2[], Vec *dir)
973 {
974   PetscFunctionBegin;
975   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
976   if (numcost) *numcost = ts->numcost;
977   if (lambda2) *lambda2 = ts->vecs_sensi2;
978   if (mu2) *mu2 = ts->vecs_sensi2p;
979   if (dir) *dir = ts->vec_dir;
980   PetscFunctionReturn(PETSC_SUCCESS);
981 }
982 
983 /*@
984   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities
985 
986   Logically Collective
987 
988   Input Parameters:
989 + ts   - the `TS` context obtained from `TSCreate()`
990 - didp - the derivative of initial values w.r.t. parameters
991 
992   Level: intermediate
993 
994   Notes:
995   When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically.
996   `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver.
997 
998 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
999 @*/
PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
{
  Mat          A;  /* single-column dense matrix holding the tangent linear initial condition */
  Vec          sp; /* work vector aliased onto the column storage of A */
  PetscScalar *xarr;
  PetscInt     lsize;

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

  /* Wrap the raw column storage of A in a Vec so Vec operations write directly into the matrix;
     the VecPlaceArray/VecResetArray pair must bracket all uses of sp */
  PetscCall(VecDuplicate(ts->vec_sol, &sp));
  PetscCall(MatDenseGetColumn(A, 0, &xarr));
  PetscCall(VecPlaceArray(sp, xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      PetscCall(MatMult(didp, ts->vec_dir, sp));
      PetscCall(VecScale(sp, 2.));
    } else {
      /* NULL didp means the initial condition does not depend on the parameters */
      PetscCall(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    PetscCall(VecCopy(ts->vec_dir, sp));
  }
  PetscCall(VecResetArray(sp));
  PetscCall(MatDenseRestoreColumn(A, &xarr));
  PetscCall(VecDestroy(&sp));

  PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

  /* TSForwardSetInitialSensitivities() took its own reference to A */
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1037 
1038 /*@
1039   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context
1040 
1041   Logically Collective
1042 
1043   Input Parameter:
1044 . ts - the `TS` context obtained from `TSCreate()`
1045 
1046   Level: intermediate
1047 
1048 .seealso: [](ch_ts), `TSAdjointSetForward()`
1049 @*/
PetscErrorCode TSAdjointResetForward(TS ts)
{
  PetscFunctionBegin;
  ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
  /* Release the tangent linear (forward sensitivity) context set up by TSAdjointSetForward() */
  PetscCall(TSForwardReset(ts));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1057 
1058 /*@
1059   TSAdjointSetUp - Sets up the internal data structures for the later use
1060   of an adjoint solver
1061 
1062   Collective
1063 
1064   Input Parameter:
1065 . ts - the `TS` context obtained from `TSCreate()`
1066 
1067   Level: advanced
1068 
1069 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1070 @*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory tj;
  PetscBool    match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Idempotent: only the first call does any work */
  if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
  /* Parameter sensitivities require a JacobianP (either the IJacobianP or the RHS one) */
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  PetscCall(TSGetTrajectory(ts, &tj));
  PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
  if (match) {
    /* The adjoint needs stage values, which the Basic trajectory discards in solution-only mode */
    PetscBool solution_only;
    PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
    PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
  }

  /* Let the TS implementation allocate its own adjoint data structures, if it provides a hook */
  PetscTryTypeMethod(ts, adjointsetup);
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1099 
1100 /*@
1101   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.
1102 
1103   Collective
1104 
1105   Input Parameter:
1106 . ts - the `TS` context obtained from `TSCreate()`
1107 
1108   Level: beginner
1109 
1110 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1111 @*/
PetscErrorCode TSAdjointReset(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Give the TS implementation a chance to free its own adjoint data first */
  PetscTryTypeMethod(ts, adjointreset);
  if (ts->quadraturets) { /* if there is integral in the cost function */
    PetscCall(VecDestroy(&ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
  }
  /* Drop the references to the caller-owned sensitivity vectors; they are not destroyed here */
  ts->vecs_sensi         = NULL;
  ts->vecs_sensip        = NULL;
  ts->vecs_sensi2        = NULL;
  ts->vecs_sensi2p       = NULL;
  ts->vec_dir            = NULL;
  ts->adjointsetupcalled = PETSC_FALSE; /* allow TSAdjointSetUp() to run again */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1129 
1130 /*@
1131   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time
1132 
1133   Logically Collective
1134 
1135   Input Parameters:
1136 + ts    - the `TS` context obtained from `TSCreate()`
1137 - steps - number of steps to use
1138 
1139   Level: intermediate
1140 
1141   Notes:
1142   Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1143   so as to integrate back to less than the original timestep
1144 
1145 .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1146 @*/
PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidLogicalCollectiveInt(ts, steps, 2);
  /* The adjoint can only walk back through steps the forward solve actually took */
  PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
  PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1157 
1158 // PetscClangLinter pragma disable: -fdoc-*
1159 /*@C
1160   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`
1161 
1162   Level: deprecated
1163 @*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  /* NOTE(review): unlike the replacement TSSetRHSJacobianP(), which stores the matrix
     in ts->Jacprhs, this deprecated routine stores it in ts->Jacp */
  if (Amat) {
    /* Take our own reference before releasing any previously stored matrix */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1179 
1180 // PetscClangLinter pragma disable: -fdoc-*
1181 /*@C
1182   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`
1183 
1184   Level: deprecated
1185 @*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 4);

  /* Evaluate the user JacobianP callback; assumes ts->rhsjacobianp was set
     (via TSSetRHSJacobianP() or the deprecated TSAdjointSetRHSJacobian()) */
  PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1196 
1197 // PetscClangLinter pragma disable: -fdoc-*
1198 /*@
1199   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
1200 
1201   Level: deprecated
1202 @*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Evaluate the user dr/du callback of the (deprecated) cost-integrand interface;
     assumes ts->drdufunction was registered */
  PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1212 
1213 // PetscClangLinter pragma disable: -fdoc-*
1214 /*@
1215   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
1216 
1217   Level: deprecated
1218 @*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* Evaluate the user dr/dp callback of the (deprecated) cost-integrand interface;
     assumes ts->drdpfunction was registered */
  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1228 
1229 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1230 /*@C
1231   TSAdjointMonitorSensi - monitors the first lambda sensitivity
1232 
1233   Level: intermediate
1234 
1235 .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1236 @*/
/* Adjoint monitor that views only lambda[0], the sensitivity of the first cost
   function w.r.t. initial conditions; the remaining arguments are part of the
   fixed adjoint-monitor calling sequence and are intentionally unused here */
static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
{
  PetscViewer viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
  /* Push/pop the format so the caller's viewer state is left untouched */
  PetscCall(PetscViewerPushFormat(viewer, vf->format));
  PetscCall(VecView(lambda[0], viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1248 
1249 /*@C
1250   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
1251 
1252   Collective
1253 
1254   Input Parameters:
1255 + ts           - `TS` object you wish to monitor
1256 . name         - the monitor type one is seeking
1257 . help         - message indicating what monitoring is done
1258 . manual       - manual page for the monitor
1259 . monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
1260 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects
1261 
1262   Level: developer
1263 
1264 .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1265           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1266           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1267           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1268           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1269           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1270           `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1271 @*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  /* Look up the option `name` in the TS options database; flg is set only if the user gave it */
  PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    /* vf takes its own reference to the viewer, so our reference can be dropped immediately */
    PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
    PetscCall(PetscViewerDestroy(&viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
    /* vf ownership passes to the monitor list; it is freed by PetscViewerAndFormatDestroy */
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1289 
1290 /*@C
1291   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1292   timestep to display the iteration's  progress.
1293 
1294   Logically Collective
1295 
1296   Input Parameters:
1297 + ts              - the `TS` context obtained from `TSCreate()`
1298 . adjointmonitor  - monitoring routine
1299 . adjointmctx     - [optional] user-defined context for private data for the monitor routine
1300                     (use `NULL` if no context is desired)
1301 - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence
1302 
1303   Calling sequence of `adjointmonitor`:
1304 + ts          - the `TS` context
1305 . steps       - iteration number (after the final time step the monitor routine is called with
1306                 a step of -1, this is at the final time which may have been interpolated to)
1307 . time        - current time
1308 . u           - current iterate
. numcost     - number of cost functions
1310 . lambda      - sensitivities to initial conditions
1311 . mu          - sensitivities to parameters
1312 - adjointmctx - [optional] adjoint monitoring context
1313 
1314   Level: intermediate
1315 
1316   Note:
1317   This routine adds an additional monitor to the list of monitors that
1318   already has been loaded.
1319 
1320   Fortran Notes:
1321   Only a single monitor function can be set for each `TS` object
1322 
1323 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1324 @*/
1325 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
1326 {
1327   PetscInt  i;
1328   PetscBool identical;
1329 
1330   PetscFunctionBegin;
1331   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1332   for (i = 0; i < ts->numbermonitors; i++) {
1333     PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
1334     if (identical) PetscFunctionReturn(PETSC_SUCCESS);
1335   }
1336   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
1337   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1338   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1339   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
1340   PetscFunctionReturn(PETSC_SUCCESS);
1341 }
1342 
1343 /*@C
1344   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.
1345 
1346   Logically Collective
1347 
1348   Input Parameter:
1349 . ts - the `TS` context obtained from `TSCreate()`
1350 
1351   Notes:
1352   There is no way to remove a single, specific monitor.
1353 
1354   Level: intermediate
1355 
1356 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1357 @*/
PetscErrorCode TSAdjointMonitorCancel(TS ts)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Run each monitor's destroy routine (if any) on its context before emptying the list */
  for (i = 0; i < ts->numberadjointmonitors; i++) {
    if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
  }
  ts->numberadjointmonitors = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1370 
1371 /*@C
1372   TSAdjointMonitorDefault - the default monitor of adjoint computations
1373 
1374   Input Parameters:
1375 + ts      - the `TS` context
1376 . step    - iteration number (after the final time step the monitor routine is called with a
1377 step of -1, this is at the final time which may have been interpolated to)
1378 . time    - current time
1379 . v       - current iterate
. numcost - number of cost functions
1381 . lambda  - sensitivities to initial conditions
1382 . mu      - sensitivities to parameters
1383 - vf      - the viewer and format
1384 
1385   Level: intermediate
1386 
1387 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1388 @*/
PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec lambda[], Vec mu[], PetscViewerAndFormat *vf)
{
  PetscViewer viewer = vf->viewer;

  PetscFunctionBegin;
  /* These parameters are part of the fixed adjoint-monitor calling sequence but are
     not used by this monitor; the casts silence unused-parameter warnings */
  (void)v;
  (void)numcost;
  (void)lambda;
  (void)mu;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
  PetscCall(PetscViewerPushFormat(viewer, vf->format));
  /* Indent to the TS object's tab level so nested solver output lines up */
  PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
  /* "(r)" marks a step that was rolled back */
  PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
  PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1406 
1407 /*@C
1408   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1409   `VecView()` for the sensitivities to initial states at each timestep
1410 
1411   Collective
1412 
1413   Input Parameters:
1414 + ts      - the `TS` context
1415 . step    - current time-step
1416 . ptime   - current time
1417 . u       - current state
1418 . numcost - number of cost functions
1419 . lambda  - sensitivities to initial conditions
1420 . mu      - sensitivities to parameters
1421 - dummy   - either a viewer or `NULL`
1422 
1423   Level: intermediate
1424 
1425 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1426 @*/
PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *dummy)
{
  TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
  PetscDraw        draw;
  PetscReal        xl, yl, xr, yr, h;
  char             time[32]; /* title string: "Timestep <n> Time <t>" */

  PetscFunctionBegin;
  /* Draw only every `howoften` steps; howoften == -1 means draw only once the solve has finished */
  if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);

  /* Plot lambda[0], the sensitivity of the first cost function to the initial conditions */
  PetscCall(VecView(lambda[0], ictx->viewer));
  PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
  PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
  PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
  /* Place the title centered horizontally, 95% of the way up the drawing area */
  h = yl + .95 * (yr - yl);
  PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
  PetscCall(PetscDrawFlush(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1446 
1447 /*@C
1448   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database.
1449 
1450   Collective
1451 
1452   Input Parameters:
1453 + ts                 - the `TS` context
1454 - PetscOptionsObject - the options context
1455 
1456   Options Database Keys:
1457 + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1458 . -ts_adjoint_monitor            - print information at each adjoint time step
1459 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically
1460 
1461   Level: developer
1462 
1463   Note:
1464   This is not normally called directly by users
1465 
1466 .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1467 @*/
PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
{
  PetscBool tflg, opt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
  /* Seed the option with the current setting so repeated calls are stable */
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
  if (opt) {
    /* The adjoint needs the forward trajectory, so turn trajectory saving on */
    PetscCall(TSSetSaveTrajectory(ts));
    ts->adjoint_solve = tflg;
  }
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
  opt = PETSC_FALSE;
  PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1; /* default: draw every adjoint step */

    PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
    PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
    /* ctx ownership passes to the monitor list, which frees it via TSMonitorDrawCtxDestroy */
    PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1495 
/*@
  TSAdjointStep - Steps one time step backward in the adjoint run

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Level: intermediate

.seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
@*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* NOTE(review): dm is fetched but not used in this routine */
  PetscCall(TSGetDM(ts, &dm));
  PetscCall(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason     = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
  /* dispatch to the implementation-specific adjoint stepper */
  PetscUseTypeMethod(ts, adjointstep);
  PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
  ts->adjoint_steps++;

  /* negative reason means the step failed; error out only if the user requested it */
  if (ts->reason < 0) {
    PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* declare convergence once the requested number of backward steps has been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1532 
/*@
  TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Options Database Key:
. -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

  Level: intermediate

  Notes:
  This must be called after a call to `TSSolve()` that solves the forward problem

  By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time

.seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
@*/
PetscErrorCode TSAdjointSolve(TS ts)
{
  static PetscBool cite = PETSC_FALSE;
#if defined(TSADJOINT_STAGE)
  PetscLogStage adjoint_stage;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
                                   "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
                                   "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
                                   "  journal       = {SIAM Journal on Scientific Computing},\n"
                                   "  volume        = {44},\n"
                                   "  number        = {1},\n"
                                   "  pages         = {C1-C24},\n"
                                   "  doi           = {10.1137/21M140078X},\n"
                                   "  year          = {2022}\n}\n",
                                   &cite));
#if defined(TSADJOINT_STAGE)
  /* optionally run the whole adjoint solve in its own logging stage (compile-time switch, see top of file) */
  PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
  PetscCall(PetscLogStagePush(adjoint_stage));
#endif
  PetscCall(TSAdjointSetUp(ts));

  /* reset time step and iteration counters */
  ts->adjoint_steps     = 0;
  ts->ksp_its           = 0;
  ts->snes_its          = 0;
  ts->num_snes_failures = 0;
  ts->reject            = 0;
  ts->reason            = TS_CONVERGED_ITERATING;

  /* by default integrate back over all the steps taken in the forward run */
  if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
  if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

  /* march backward: restore the forward state from the trajectory, monitor, then take one adjoint step */
  while (!ts->reason) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
    PetscCall(TSAdjointEventHandler(ts));
    PetscCall(TSAdjointStep(ts));
    if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
  }
  /* if we stepped all the way back to step 0, also monitor the state at the initial time */
  if (!ts->steps) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
  }
  ts->solvetime = ts->ptime;
  PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
  PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
  ts->adjoint_max_steps = 0; /* reset so a subsequent adjoint solve recomputes the default */
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStagePop());
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1610 
1611 /*@C
1612   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`
1613 
1614   Collective
1615 
1616   Input Parameters:
1617 + ts      - time stepping context obtained from `TSCreate()`
1618 . step    - step number that has just completed
1619 . ptime   - model time of the state
1620 . u       - state at the current model time
1621 . numcost - number of cost functions (dimension of lambda  or mu)
1622 . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1623 - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters
1624 
1625   Level: developer
1626 
1627   Note:
1628   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1629   Users would almost never call this routine directly.
1630 
1631 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1632 @*/
1633 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[])
1634 {
1635   PetscInt i, n = ts->numberadjointmonitors;
1636 
1637   PetscFunctionBegin;
1638   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1639   PetscValidHeaderSpecific(u, VEC_CLASSID, 4);
1640   PetscCall(VecLockReadPush(u));
1641   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1642   PetscCall(VecLockReadPop(u));
1643   PetscFunctionReturn(PETSC_SUCCESS);
1644 }
1645 
/*@
  TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Notes:
  This function cannot be called until `TSAdjointStep()` has been completed.

.seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
 @*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dispatch to the implementation-provided adjoint quadrature routine; errors if the TS type does not supply one */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1668 
1669 /* ------------------ Forward (tangent linear) sensitivity  ------------------*/
1670 
1671 /*@
1672   TSForwardSetUp - Sets up the internal data structures for the later use
1673   of forward sensitivity analysis
1674 
1675   Collective
1676 
1677   Input Parameter:
1678 . ts - the `TS` context obtained from `TSCreate()`
1679 
1680   Level: advanced
1681 
1682 .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1683 @*/
1684 PetscErrorCode TSForwardSetUp(TS ts)
1685 {
1686   PetscFunctionBegin;
1687   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1688   if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
1689   PetscTryTypeMethod(ts, forwardsetup);
1690   PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
1691   ts->forwardsetupcalled = PETSC_TRUE;
1692   PetscFunctionReturn(PETSC_SUCCESS);
1693 }
1694 
1695 /*@
1696   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis
1697 
1698   Collective
1699 
1700   Input Parameter:
1701 . ts - the `TS` context obtained from `TSCreate()`
1702 
1703   Level: advanced
1704 
1705 .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1706 @*/
1707 PetscErrorCode TSForwardReset(TS ts)
1708 {
1709   TS quadts = ts->quadraturets;
1710 
1711   PetscFunctionBegin;
1712   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1713   PetscTryTypeMethod(ts, forwardreset);
1714   PetscCall(MatDestroy(&ts->mat_sensip));
1715   if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
1716   PetscCall(VecDestroy(&ts->vec_sensip_col));
1717   ts->forward_solve      = PETSC_FALSE;
1718   ts->forwardsetupcalled = PETSC_FALSE;
1719   PetscFunctionReturn(PETSC_SUCCESS);
1720 }
1721 
1722 /*@
1723   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.
1724 
1725   Input Parameters:
1726 + ts        - the `TS` context obtained from `TSCreate()`
1727 . numfwdint - number of integrals
1728 - vp        - the vectors containing the gradients for each integral w.r.t. parameters
1729 
1730   Level: deprecated
1731 
1732 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1733 @*/
1734 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec vp[])
1735 {
1736   PetscFunctionBegin;
1737   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1738   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
1739   if (!ts->numcost) ts->numcost = numfwdint;
1740 
1741   ts->vecs_integral_sensip = vp;
1742   PetscFunctionReturn(PETSC_SUCCESS);
1743 }
1744 
1745 /*@
1746   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.
1747 
1748   Input Parameter:
1749 . ts - the `TS` context obtained from `TSCreate()`
1750 
1751   Output Parameters:
1752 + numfwdint - number of integrals
1753 - vp        - the vectors containing the gradients for each integral w.r.t. parameters
1754 
1755   Level: deprecated
1756 
1757 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1758 @*/
1759 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec *vp[])
1760 {
1761   PetscFunctionBegin;
1762   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1763   PetscAssertPointer(vp, 3);
1764   if (numfwdint) *numfwdint = ts->numcost;
1765   if (vp) *vp = ts->vecs_integral_sensip;
1766   PetscFunctionReturn(PETSC_SUCCESS);
1767 }
1768 
1769 /*@
1770   TSForwardStep - Compute the forward sensitivity for one time step.
1771 
1772   Collective
1773 
1774   Input Parameter:
1775 . ts - time stepping context
1776 
1777   Level: advanced
1778 
1779   Notes:
1780   This function cannot be called until `TSStep()` has been completed.
1781 
1782 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1783 @*/
1784 PetscErrorCode TSForwardStep(TS ts)
1785 {
1786   PetscFunctionBegin;
1787   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1788   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1789   PetscUseTypeMethod(ts, forwardstep);
1790   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1791   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1792   PetscFunctionReturn(PETSC_SUCCESS);
1793 }
1794 
1795 /*@
1796   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.
1797 
1798   Logically Collective
1799 
1800   Input Parameters:
1801 + ts   - the `TS` context obtained from `TSCreate()`
1802 . nump - number of parameters
1803 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1804 
1805   Level: beginner
1806 
1807   Notes:
1808   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`
1809 
1810   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1811   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1812   You must call this function before `TSSolve()`.
1813   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.
1814 
1815 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1816 @*/
1817 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1818 {
1819   PetscFunctionBegin;
1820   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1821   PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3);
1822   ts->forward_solve = PETSC_TRUE;
1823   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1824   else ts->num_parameters = nump;
1825   PetscCall(PetscObjectReference((PetscObject)Smat));
1826   PetscCall(MatDestroy(&ts->mat_sensip));
1827   ts->mat_sensip = Smat;
1828   PetscFunctionReturn(PETSC_SUCCESS);
1829 }
1830 
1831 /*@
1832   TSForwardGetSensitivities - Returns the trajectory sensitivities
1833 
1834   Not Collective, but Smat returned is parallel if ts is parallel
1835 
1836   Output Parameters:
1837 + ts   - the `TS` context obtained from `TSCreate()`
1838 . nump - number of parameters
1839 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1840 
1841   Level: intermediate
1842 
1843 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1844 @*/
1845 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1846 {
1847   PetscFunctionBegin;
1848   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1849   if (nump) *nump = ts->num_parameters;
1850   if (Smat) *Smat = ts->mat_sensip;
1851   PetscFunctionReturn(PETSC_SUCCESS);
1852 }
1853 
/*@
  TSForwardCostIntegral - Evaluate the cost integral in the forward run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Note:
  This function cannot be called until `TSStep()` has been completed.

.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dispatch to the implementation-provided forward quadrature routine; errors if the TS type does not supply one */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1876 
/*@
  TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

  Collective

  Input Parameters:
+ ts   - the `TS` context obtained from `TSCreate()`
- didp - parametric sensitivities of the initial condition

  Level: intermediate

  Notes:
  `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
  This function is used to set initial values for tangent linear variables.

.seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(didp, MAT_CLASSID, 2);
  /* only takes effect when no sensitivity matrix has been set yet;
     otherwise didp is silently ignored and the existing matrix is kept */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1902 
1903 /*@
1904   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages
1905 
1906   Input Parameter:
1907 . ts - the `TS` context obtained from `TSCreate()`
1908 
1909   Output Parameters:
1910 + ns - number of stages
1911 - S  - tangent linear sensitivities at the intermediate stages
1912 
1913   Level: advanced
1914 
1915 .seealso: `TS`
1916 @*/
1917 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1918 {
1919   PetscFunctionBegin;
1920   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1921 
1922   if (!ts->ops->getstages) *S = NULL;
1923   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1924   PetscFunctionReturn(PETSC_SUCCESS);
1925 }
1926 
1927 /*@
1928   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time
1929 
1930   Input Parameters:
1931 + ts  - the `TS` context obtained from `TSCreate()`
1932 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1933 
1934   Output Parameter:
1935 . quadts - the child `TS` context
1936 
1937   Level: intermediate
1938 
1939 .seealso: [](ch_ts), `TSGetQuadratureTS()`
1940 @*/
1941 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1942 {
1943   char prefix[128];
1944 
1945   PetscFunctionBegin;
1946   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1947   PetscAssertPointer(quadts, 3);
1948   PetscCall(TSDestroy(&ts->quadraturets));
1949   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1950   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1951   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1952   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1953   *quadts = ts->quadraturets;
1954 
1955   if (ts->numcost) {
1956     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1957   } else {
1958     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1959   }
1960   ts->costintegralfwd = fwd;
1961   PetscFunctionReturn(PETSC_SUCCESS);
1962 }
1963 
1964 /*@
1965   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time
1966 
1967   Input Parameter:
1968 . ts - the `TS` context obtained from `TSCreate()`
1969 
1970   Output Parameters:
1971 + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1972 - quadts - the child `TS` context
1973 
1974   Level: intermediate
1975 
1976 .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1977 @*/
1978 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1979 {
1980   PetscFunctionBegin;
1981   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1982   if (fwd) *fwd = ts->costintegralfwd;
1983   if (quadts) *quadts = ts->quadraturets;
1984   PetscFunctionReturn(PETSC_SUCCESS);
1985 }
1986 
/*@
  TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

  Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
- x  - state vector

  Output Parameters:
+ J    - Jacobian matrix
- Jpre - matrix used to compute the preconditioner for `J` (may be same as `J`)

  Level: developer

  Note:
  Uses finite differencing when `TS` Jacobian is not available.

.seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
@*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes                                          = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* make x the current SNES solution so the FD base point matches the evaluation point */
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  /* evaluate the Jacobian (analytic or finite-differenced) at x */
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2030