xref: /petsc/src/ts/interface/sensitivity/tssen.c (revision 4e8208cbcbc709572b8abe32f33c78b69c819375)
1 #include <petsc/private/tsimpl.h> /*I <petscts.h>  I*/
2 #include <petscdraw.h>
3 
4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;
5 
6 /* #define TSADJOINT_STAGE */
7 
8 /* ------------------------ Sensitivity Context ---------------------------*/
9 
10 /*@C
11   TSSetRHSJacobianP - Sets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
12 
13   Logically Collective
14 
15   Input Parameters:
16 + ts   - `TS` context obtained from `TSCreate()`
17 . Amat - JacobianP matrix
18 . func - function
19 - ctx  - [optional] user-defined function context
20 
21   Level: intermediate
22 
23   Note:
24   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
25 
26 .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()`
27 @*/
TSSetRHSJacobianP(TS ts,Mat Amat,TSRHSJacobianPFn * func,PetscCtx ctx)28 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, PetscCtx ctx)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
32   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
33 
34   ts->rhsjacobianp    = func;
35   ts->rhsjacobianpctx = ctx;
36   if (Amat) {
37     PetscCall(PetscObjectReference((PetscObject)Amat));
38     PetscCall(MatDestroy(&ts->Jacprhs));
39     ts->Jacprhs = Amat;
40   }
41   PetscFunctionReturn(PETSC_SUCCESS);
42 }
43 
44 /*@C
45   TSGetRHSJacobianP - Gets the function that computes the Jacobian of $G$ w.r.t. the parameters $p$ where $U_t = G(U,p,t)$, as well as the location to store the matrix.
46 
47   Logically Collective
48 
49   Input Parameter:
50 . ts - `TS` context obtained from `TSCreate()`
51 
52   Output Parameters:
53 + Amat - JacobianP matrix
54 . func - function
55 - ctx  - [optional] user-defined function context
56 
57   Level: intermediate
58 
59   Note:
60   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
61 
62 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn`
63 @*/
TSGetRHSJacobianP(TS ts,Mat * Amat,TSRHSJacobianPFn ** func,PetscCtxRt ctx)64 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, PetscCtxRt ctx)
65 {
66   PetscFunctionBegin;
67   if (func) *func = ts->rhsjacobianp;
68   if (ctx) *(void **)ctx = ts->rhsjacobianpctx;
69   if (Amat) *Amat = ts->Jacprhs;
70   PetscFunctionReturn(PETSC_SUCCESS);
71 }
72 
73 /*@C
74   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.
75 
76   Collective
77 
78   Input Parameters:
79 + ts - The `TS` context obtained from `TSCreate()`
80 . t  - the time
81 - U  - the solution at which to compute the Jacobian
82 
83   Output Parameter:
84 . Amat - the computed Jacobian
85 
86   Level: developer
87 
88 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
89 @*/
TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat)90 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat)
91 {
92   PetscFunctionBegin;
93   if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
94   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
95   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
96 
97   if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
98   else {
99     PetscBool assembled;
100     PetscCall(MatZeroEntries(Amat));
101     PetscCall(MatAssembled(Amat, &assembled));
102     if (!assembled) {
103       PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
104       PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
105     }
106   }
107   PetscFunctionReturn(PETSC_SUCCESS);
108 }
109 
110 /*@C
111   TSSetIJacobianP - Sets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
112 
113   Logically Collective
114 
115   Input Parameters:
116 + ts   - `TS` context obtained from `TSCreate()`
117 . Amat - JacobianP matrix
118 . func - function
119 - ctx  - [optional] user-defined function context
120 
121   Calling sequence of `func`:
122 + ts    - the `TS` context
123 . t     - current timestep
124 . U     - input vector (current ODE solution)
125 . Udot  - time derivative of state vector
126 . shift - shift to apply, see the note in `TSSetIJacobian()`
127 . A     - output matrix
128 - ctx   - [optional] user-defined function context
129 
130   Level: intermediate
131 
132   Note:
133   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
134 
135 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`
136 @*/
PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtx ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  /* Reference the incoming matrix first so swapping is safe even if it equals ts->Jacp */
  if (Amat) {
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  /* Stash the callback and its (optional) context */
  ts->ijacobianp    = func;
  ts->ijacobianpctx = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
152 
153 /*@C
154   TSGetIJacobianP - Gets the function that computes the Jacobian of $F$ w.r.t. the parameters $p$ where $F(Udot,U,p,t) = G(U,p,t)$, as well as the location to store the matrix.
155 
156   Logically Collective
157 
158   Input Parameter:
159 . ts - `TS` context obtained from `TSCreate()`
160 
161   Output Parameters:
162 + Amat - JacobianP matrix
163 . func - the function that computes the JacobianP
164 - ctx  - [optional] user-defined function context
165 
166   Calling sequence of `func`:
167 + ts    - the `TS` context
168 . t     - current timestep
169 . U     - input vector (current ODE solution)
170 . Udot  - time derivative of state vector
171 . shift - shift to apply, see the note in `TSSetIJacobian()`
172 . A     - output matrix
173 - ctx   - [optional] user-defined function context
174 
175   Level: intermediate
176 
177   Note:
178   `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p`
179 
180 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSSetIJacobianP()`, `TSGetRHSJacobianP()`
181 @*/
PetscErrorCode TSGetIJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, PetscCtx ctx), PetscCtxRt ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Each output is optional and filled independently */
  if (Amat) *Amat = ts->Jacp;
  if (func) *func = ts->ijacobianp;
  if (ctx) *(void **)ctx = ts->ijacobianpctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
192 
193 /*@
194   TSComputeIJacobianP - Runs the user-defined IJacobianP function.
195 
196   Collective
197 
198   Input Parameters:
199 + ts    - the `TS` context
200 . t     - current timestep
201 . U     - state vector
202 . Udot  - time derivative of state vector
203 . shift - shift to apply, see note below
204 - imex  - flag indicates if the method is IMEX so that the `RHSJacobianP` should be kept separate
205 
206   Output Parameter:
207 . Amat - Jacobian matrix
208 
209   Level: developer
210 
211 .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()`
212 @*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  /* Nothing to compute into; treat as a no-op */
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  /* First let the user's IJacobianP callback (if any) fill Amat with dF/dp */
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    /* IMEX: keep the RHS JacobianP separate; only make sure Amat is a valid (zero, assembled) matrix
       when no IJacobianP callback exists */
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* Fully implicit view: fold -dG/dp into Amat so Amat = dF/dp - dG/dp */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      /* RHS and implicit matrices are the same object: just flip the sign in place */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        /* Amat may hold stale values; clear it before accumulating -Jacprhs */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
248 
249 /*@C
250   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions
251 
252   Logically Collective
253 
254   Input Parameters:
255 + ts           - the `TS` context obtained from `TSCreate()`
256 . numcost      - number of gradients to be computed, this is the number of cost functions
257 . costintegral - vector that stores the integral values
258 . rf           - routine for evaluating the integrand function
259 . drduf        - function that computes the gradients of the r with respect to u
260 . drdpf        - function that computes the gradients of the r with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
261 . fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
262 - ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)
263 
264   Calling sequence of `rf`:
265 + ts  - the integrator
266 . t   - the time
267 . U   - the solution
268 . F   - the computed value of the function
269 - ctx - the user context
270 
271   Calling sequence of `drduf`:
272 + ts   - the integrator
273 . t    - the time
274 . U    - the solution
275 . dRdU - the computed gradients of the r with respect to u
276 - ctx  - the user context
277 
278   Calling sequence of `drdpf`:
279 + ts   - the integrator
280 . t    - the time
281 . U    - the solution
282 . dRdP - the computed gradients of the r with respect to p
283 - ctx  - the user context
284 
285   Level: deprecated
286 
287   Notes:
288   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions
289 
290   Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead
291 
292 .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
293           `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
294 @*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, PetscCtx ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, PetscCtx ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, PetscCtx ctx), PetscBool fwd, PetscCtx ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  /* numcost must agree with any value previously fixed by TSSetCostGradients()/TSForwardSetIntegralGradients() */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* Adopt the caller-supplied storage for the integral values (reference before destroy) */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      /* Reuse existing storage, resetting any accumulated values */
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  /* Work vector for integrand evaluations, shaped like the integral vector */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}
326 
327 /*@
328   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
329   It is valid to call the routine after a backward run.
330 
331   Not Collective
332 
333   Input Parameter:
334 . ts - the `TS` context obtained from `TSCreate()`
335 
336   Output Parameter:
337 . v - the vector containing the integrals for each cost function
338 
339   Level: intermediate
340 
341 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
342 @*/
TSGetCostIntegral(TS ts,Vec * v)343 PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
344 {
345   TS quadts;
346 
347   PetscFunctionBegin;
348   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
349   PetscAssertPointer(v, 2);
350   PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
351   *v = quadts->vec_sol;
352   PetscFunctionReturn(PETSC_SUCCESS);
353 }
354 
355 /*@
356   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.
357 
358   Input Parameters:
359 + ts - the `TS` context
360 . t  - current time
361 - U  - state vector, i.e. current solution
362 
363   Output Parameter:
364 . Q - vector of size numcost to hold the outputs
365 
366   Level: deprecated
367 
368   Note:
369   Most users should not need to explicitly call this routine, as it
370   is used internally within the sensitivity analysis context.
371 
372 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
373 @*/
TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q)374 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
375 {
376   PetscFunctionBegin;
377   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
378   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
379   PetscValidHeaderSpecific(Q, VEC_CLASSID, 4);
380 
381   PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
382   if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
383   else PetscCall(VecZeroEntries(Q));
384   PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
385   PetscFunctionReturn(PETSC_SUCCESS);
386 }
387 
388 // PetscClangLinter pragma disable: -fdoc-*
389 /*@
390   TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
391 
392   Level: deprecated
393 
394 @*/
TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec * DRDU)395 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
396 {
397   PetscFunctionBegin;
398   if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);
399   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
400   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
401 
402   PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
403   PetscFunctionReturn(PETSC_SUCCESS);
404 }
405 
406 // PetscClangLinter pragma disable: -fdoc-*
407 /*@
408   TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
409 
410   Level: deprecated
411 
412 @*/
TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec * DRDP)413 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
414 {
415   PetscFunctionBegin;
416   if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);
417   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
418   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
419 
420   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
421   PetscFunctionReturn(PETSC_SUCCESS);
422 }
423 
424 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
425 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
426 /*@C
427   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.
428 
429   Logically Collective
430 
431   Input Parameters:
432 + ts   - `TS` context obtained from `TSCreate()`
433 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
434 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
435 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
436 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
437 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
438 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
439 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
440 . hessianproductfunc4 - vector-Hessian-vector product function for F_PP
440 - ctx  - [optional] user-defined function context
441 
442   Calling sequence of `ihessianproductfunc1`:
443 + ts  - the `TS` context
444 . t   - current timestep
445 . U   - input vector (current ODE solution)
446 . Vl  - an array of input vectors to be left-multiplied with the Hessian
447 . Vr  - input vector to be right-multiplied with the Hessian
448 . VHV - an array of output vectors for vector-Hessian-vector product
449 - ctx - [optional] user-defined function context
450 
451   Level: intermediate
452 
453   Notes:
454   All other functions have the same calling sequence as `rhhessianproductfunc1`, so their
455   descriptions are omitted for brevity.
456 
457   The first Hessian function and the working array are required.
458   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
459   $ Vl_n^T*F_UP*Vr
460   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
461   Each entry of F_UP corresponds to the derivative
462   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
463   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
464   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
465   If the cost function is a scalar, there will be only one vector in Vl and VHV.
466 
467 .seealso: [](ch_ts), `TS`
468 @*/
TSSetIHessianProduct(TS ts,Vec * ihp1,PetscErrorCode (* ihessianproductfunc1)(TS ts,PetscReal t,Vec U,Vec * Vl,Vec Vr,Vec * VHV,PetscCtx ctx),Vec * ihp2,PetscErrorCode (* ihessianproductfunc2)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),Vec * ihp3,PetscErrorCode (* ihessianproductfunc3)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),Vec * ihp4,PetscErrorCode (* ihessianproductfunc4)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),PetscCtx ctx)469 PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, PetscCtx ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), PetscCtx ctx)
470 {
471   PetscFunctionBegin;
472   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
473   PetscAssertPointer(ihp1, 2);
474 
475   ts->ihessianproductctx = ctx;
476   if (ihp1) ts->vecs_fuu = ihp1;
477   if (ihp2) ts->vecs_fup = ihp2;
478   if (ihp3) ts->vecs_fpu = ihp3;
479   if (ihp4) ts->vecs_fpp = ihp4;
480   ts->ihessianproduct_fuu = ihessianproductfunc1;
481   ts->ihessianproduct_fup = ihessianproductfunc2;
482   ts->ihessianproduct_fpu = ihessianproductfunc3;
483   ts->ihessianproduct_fpp = ihessianproductfunc4;
484   PetscFunctionReturn(PETSC_SUCCESS);
485 }
486 
487 /*@
488   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.
489 
490   Collective
491 
492   Input Parameters:
493 + ts - The `TS` context obtained from `TSCreate()`
494 . t  - the time
495 . U  - the solution at which to compute the Hessian product
496 . Vl - the array of input vectors to be multiplied with the Hessian from the left
497 - Vr - the input vector to be multiplied with the Hessian from the right
498 
499   Output Parameter:
500 . VHV - the array of output vectors that store the Hessian product
501 
502   Level: developer
503 
504   Note:
505   `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
506   so most users would not generally call this routine themselves.
507 
508 .seealso: [](ch_ts), `TSSetIHessianProduct()`
509 @*/
TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])510 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
511 {
512   PetscFunctionBegin;
513   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
514   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
515   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
516 
517   if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
518 
519   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
520   if (ts->rhshessianproduct_guu) {
521     PetscInt nadj;
522     PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
523     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
524   }
525   PetscFunctionReturn(PETSC_SUCCESS);
526 }
527 
528 /*@
529   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.
530 
531   Collective
532 
533   Input Parameters:
534 + ts - The `TS` context obtained from `TSCreate()`
535 . t  - the time
536 . U  - the solution at which to compute the Hessian product
537 . Vl - the array of input vectors to be multiplied with the Hessian from the left
538 - Vr - the input vector to be multiplied with the Hessian from the right
539 
540   Output Parameter:
541 . VHV - the array of output vectors that store the Hessian product
542 
543   Level: developer
544 
545   Note:
546   `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
547   so most users would not generally call this routine themselves.
548 
549 .seealso: [](ch_ts), `TSSetIHessianProduct()`
550 @*/
TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])551 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
552 {
553   PetscFunctionBegin;
554   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
555   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
556   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
557 
558   if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
559 
560   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
561   if (ts->rhshessianproduct_gup) {
562     PetscInt nadj;
563     PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
564     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
565   }
566   PetscFunctionReturn(PETSC_SUCCESS);
567 }
568 
569 /*@
570   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.
571 
572   Collective
573 
574   Input Parameters:
575 + ts - The `TS` context obtained from `TSCreate()`
576 . t  - the time
577 . U  - the solution at which to compute the Hessian product
578 . Vl - the array of input vectors to be multiplied with the Hessian from the left
579 - Vr - the input vector to be multiplied with the Hessian from the right
580 
581   Output Parameter:
582 . VHV - the array of output vectors that store the Hessian product
583 
584   Level: developer
585 
586   Note:
587   `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
588   so most users would not generally call this routine themselves.
589 
590 .seealso: [](ch_ts), `TSSetIHessianProduct()`
591 @*/
TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])592 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
593 {
594   PetscFunctionBegin;
595   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
596   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
597   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
598 
599   if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
600 
601   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
602   if (ts->rhshessianproduct_gpu) {
603     PetscInt nadj;
604     PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
605     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
606   }
607   PetscFunctionReturn(PETSC_SUCCESS);
608 }
609 
610 /*@
611   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.
612 
613   Collective
614 
615   Input Parameters:
616 + ts - The `TS` context obtained from `TSCreate()`
617 . t  - the time
618 . U  - the solution at which to compute the Hessian product
619 . Vl - the array of input vectors to be multiplied with the Hessian from the left
620 - Vr - the input vector to be multiplied with the Hessian from the right
621 
622   Output Parameter:
623 . VHV - the array of output vectors that store the Hessian product
624 
625   Level: developer
626 
627   Note:
628   `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
629   so most users would not generally call this routine themselves.
630 
631 .seealso: [](ch_ts), `TSSetIHessianProduct()`
632 @*/
TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])633 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
634 {
635   PetscFunctionBegin;
636   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
637   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
638   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
639 
640   if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));
641 
642   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
643   if (ts->rhshessianproduct_gpp) {
644     PetscInt nadj;
645     PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV));
646     for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
647   }
648   PetscFunctionReturn(PETSC_SUCCESS);
649 }
650 
651 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
652 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
653 /*@C
654   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector
655   product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state
656   variable.
657 
658   Logically Collective
659 
660   Input Parameters:
661 + ts     - `TS` context obtained from `TSCreate()`
662 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
663 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
664 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
665 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
666 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
667 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
668 . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
669 . hessianproductfunc4 - vector-Hessian-vector product function for G_PP
670 - ctx    - [optional] user-defined function context
671 
672   Calling sequence of `rhshessianproductfunc1`:
673 + ts  - the `TS` context
674 . t   - current timestep
675 . U   - input vector (current ODE solution)
676 . Vl  - an array of input vectors to be left-multiplied with the Hessian
677 . Vr  - input vector to be right-multiplied with the Hessian
678 . VHV - an array of output vectors for vector-Hessian-vector product
679 - ctx - [optional] user-defined function context
680 
681   Level: intermediate
682 
683   Notes:
684   All other functions have the same calling sequence as `rhhessianproductfunc1`, so their
685   descriptions are omitted for brevity.
686 
687   The first Hessian function and the working array are required.
688 
689   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
690   $ Vl_n^T*G_UP*Vr
691   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
692   Each entry of G_UP corresponds to the derivative
693   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
694   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
695   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
696   If the cost function is a scalar, there will be only one vector in Vl and VHV.
697 
698 .seealso: `TS`, `TSAdjoint`
699 @*/
TSSetRHSHessianProduct(TS ts,Vec rhshp1[],PetscErrorCode (* rhshessianproductfunc1)(TS ts,PetscReal t,Vec U,Vec * Vl,Vec Vr,Vec * VHV,PetscCtx ctx),Vec rhshp2[],PetscErrorCode (* rhshessianproductfunc2)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),Vec rhshp3[],PetscErrorCode (* rhshessianproductfunc3)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),Vec rhshp4[],PetscErrorCode (* rhshessianproductfunc4)(TS,PetscReal,Vec,Vec *,Vec,Vec *,void *),PetscCtx ctx)700 PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec rhshp1[], PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, PetscCtx ctx), Vec rhshp2[], PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp3[], PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec rhshp4[], PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), PetscCtx ctx)
701 {
702   PetscFunctionBegin;
703   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
704   PetscAssertPointer(rhshp1, 2);
705 
706   ts->rhshessianproductctx = ctx;
707   if (rhshp1) ts->vecs_guu = rhshp1;
708   if (rhshp2) ts->vecs_gup = rhshp2;
709   if (rhshp3) ts->vecs_gpu = rhshp3;
710   if (rhshp4) ts->vecs_gpp = rhshp4;
711   ts->rhshessianproduct_guu = rhshessianproductfunc1;
712   ts->rhshessianproduct_gup = rhshessianproductfunc2;
713   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
714   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
715   PetscFunctionReturn(PETSC_SUCCESS);
716 }
717 
718 /*@
719   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.
720 
721   Collective
722 
723   Input Parameters:
724 + ts - The `TS` context obtained from `TSCreate()`
725 . t  - the time
726 . U  - the solution at which to compute the Hessian product
727 . Vl - the array of input vectors to be multiplied with the Hessian from the left
728 - Vr - the input vector to be multiplied with the Hessian from the right
729 
730   Output Parameter:
731 . VHV - the array of output vectors that store the Hessian product
732 
733   Level: developer
734 
735   Note:
736   `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
737   so most users would not generally call this routine themselves.
738 
739 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
740 @*/
TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])741 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
742 {
743   PetscFunctionBegin;
744   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
745   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
746   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
747 
748   PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
749   PetscFunctionReturn(PETSC_SUCCESS);
750 }
751 
752 /*@
753   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.
754 
755   Collective
756 
757   Input Parameters:
758 + ts - The `TS` context obtained from `TSCreate()`
759 . t  - the time
760 . U  - the solution at which to compute the Hessian product
761 . Vl - the array of input vectors to be multiplied with the Hessian from the left
762 - Vr - the input vector to be multiplied with the Hessian from the right
763 
764   Output Parameter:
765 . VHV - the array of output vectors that store the Hessian product
766 
767   Level: developer
768 
769   Note:
770   `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
771   so most users would not generally call this routine themselves.
772 
773 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
774 @*/
TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])775 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
776 {
777   PetscFunctionBegin;
778   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
779   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
780   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
781 
782   PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
783   PetscFunctionReturn(PETSC_SUCCESS);
784 }
785 
786 /*@
787   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.
788 
789   Collective
790 
791   Input Parameters:
792 + ts - The `TS` context obtained from `TSCreate()`
793 . t  - the time
794 . U  - the solution at which to compute the Hessian product
795 . Vl - the array of input vectors to be multiplied with the Hessian from the left
796 - Vr - the input vector to be multiplied with the Hessian from the right
797 
798   Output Parameter:
799 . VHV - the array of output vectors that store the Hessian product
800 
801   Level: developer
802 
803   Note:
804   `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
805   so most users would not generally call this routine themselves.
806 
807 .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
808 @*/
TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])809 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
810 {
811   PetscFunctionBegin;
812   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
813   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
814   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
815 
816   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
817   PetscFunctionReturn(PETSC_SUCCESS);
818 }
819 
820 /*@
821   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.
822 
823   Collective
824 
825   Input Parameters:
826 + ts - The `TS` context obtained from `TSCreate()`
827 . t  - the time
828 . U  - the solution at which to compute the Hessian product
829 . Vl - the array of input vectors to be multiplied with the Hessian from the left
830 - Vr - the input vector to be multiplied with the Hessian from the right
831 
832   Output Parameter:
833 . VHV - the array of output vectors that store the Hessian product
834 
835   Level: developer
836 
837   Note:
838   `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation,
839   so most users would not generally call this routine themselves.
840 
841 .seealso: [](ch_ts), `TSSetRHSHessianProduct()`
842 @*/
TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec Vl[],Vec Vr,Vec VHV[])843 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec Vl[], Vec Vr, Vec VHV[])
844 {
845   PetscFunctionBegin;
846   if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
847   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
848   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
849 
850   PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
851   PetscFunctionReturn(PETSC_SUCCESS);
852 }
853 
854 /* --------------------------- Adjoint sensitivity ---------------------------*/
855 
856 /*@
857   TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
858   for use by the `TS` adjoint routines.
859 
860   Logically Collective
861 
862   Input Parameters:
863 + ts      - the `TS` context obtained from `TSCreate()`
864 . numcost - number of gradients to be computed, this is the number of cost functions
865 . lambda  - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
866 - mu      - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
867 
868   Level: beginner
869 
870   Notes:
871   the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime
872 
873   After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities
874 
875 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
876 @*/
TSSetCostGradients(TS ts,PetscInt numcost,Vec lambda[],Vec mu[])877 PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec lambda[], Vec mu[])
878 {
879   PetscFunctionBegin;
880   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
881   PetscAssertPointer(lambda, 3);
882   ts->vecs_sensi  = lambda;
883   ts->vecs_sensip = mu;
884   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
885   ts->numcost = numcost;
886   PetscFunctionReturn(PETSC_SUCCESS);
887 }
888 
889 /*@
890   TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`
891 
892   Not Collective, but the vectors returned are parallel if `TS` is parallel
893 
894   Input Parameter:
895 . ts - the `TS` context obtained from `TSCreate()`
896 
897   Output Parameters:
898 + numcost - size of returned arrays
899 . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
900 - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters
901 
902   Level: intermediate
903 
904 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
905 @*/
TSGetCostGradients(TS ts,PetscInt * numcost,Vec * lambda[],Vec * mu[])906 PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec *lambda[], Vec *mu[])
907 {
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
910   if (numcost) *numcost = ts->numcost;
911   if (lambda) *lambda = ts->vecs_sensi;
912   if (mu) *mu = ts->vecs_sensip;
913   PetscFunctionReturn(PETSC_SUCCESS);
914 }
915 
916 /*@
917   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
918   for use by the `TS` adjoint routines.
919 
920   Logically Collective
921 
922   Input Parameters:
923 + ts      - the `TS` context obtained from `TSCreate()`
924 . numcost - number of cost functions
925 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
926 . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
927 - dir     - the direction vector that are multiplied with the Hessian of the cost functions
928 
929   Level: beginner
930 
931   Notes:
932   Hessian of the cost function is completely different from Hessian of the ODE/DAE system
933 
934   For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.
935 
936   After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
937 
938   Passing `NULL` for `lambda2` disables the second-order calculation.
939 
940 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()`
941 @*/
TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec lambda2[],Vec mu2[],Vec dir)942 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec lambda2[], Vec mu2[], Vec dir)
943 {
944   PetscFunctionBegin;
945   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
946   PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
947   ts->numcost      = numcost;
948   ts->vecs_sensi2  = lambda2;
949   ts->vecs_sensi2p = mu2;
950   ts->vec_dir      = dir;
951   PetscFunctionReturn(PETSC_SUCCESS);
952 }
953 
954 /*@
955   TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()`
956 
957   Not Collective, but vectors returned are parallel if `TS` is parallel
958 
959   Input Parameter:
960 . ts - the `TS` context obtained from `TSCreate()`
961 
962   Output Parameters:
963 + numcost - number of cost functions
964 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
965 . mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
966 - dir     - the direction vector that are multiplied with the Hessian of the cost functions
967 
968   Level: intermediate
969 
970 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`
971 @*/
TSGetCostHessianProducts(TS ts,PetscInt * numcost,Vec * lambda2[],Vec * mu2[],Vec * dir)972 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec *lambda2[], Vec *mu2[], Vec *dir)
973 {
974   PetscFunctionBegin;
975   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
976   if (numcost) *numcost = ts->numcost;
977   if (lambda2) *lambda2 = ts->vecs_sensi2;
978   if (mu2) *mu2 = ts->vecs_sensi2p;
979   if (dir) *dir = ts->vec_dir;
980   PetscFunctionReturn(PETSC_SUCCESS);
981 }
982 
983 /*@
984   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities
985 
986   Logically Collective
987 
988   Input Parameters:
989 + ts   - the `TS` context obtained from `TSCreate()`
990 - didp - the derivative of initial values w.r.t. parameters
991 
992   Level: intermediate
993 
994   Notes:
995   When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically.
996   `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver.
997 
998 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
999 @*/
PetscErrorCode TSAdjointSetForward(TS ts, Mat didp)
{
  Mat          A;     /* single-column dense matrix holding the initial tangent linear variable */
  Vec          sp;    /* work vector placed directly on top of A's column storage (no copy) */
  PetscScalar *xarr;  /* raw pointer to A's (only) column */
  PetscInt     lsize; /* local size of the solution vector, used to size A's rows */

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  PetscCall(VecGetLocalSize(ts->vec_sol, &lsize));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A));

  /* sp aliases A's column array so the initialization below writes straight into A;
     VecResetArray must be called before MatDenseRestoreColumn to release the borrowed
     pointer in the correct order */
  PetscCall(VecDuplicate(ts->vec_sol, &sp));
  PetscCall(MatDenseGetColumn(A, 0, &xarr));
  PetscCall(VecPlaceArray(sp, xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      PetscCall(MatMult(didp, ts->vec_dir, sp));
      PetscCall(VecScale(sp, 2.));
    } else {
      /* no dIdP supplied: the product is taken as zero */
      PetscCall(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    PetscCall(VecCopy(ts->vec_dir, sp));
  }
  PetscCall(VecResetArray(sp));
  PetscCall(MatDenseRestoreColumn(A, &xarr));
  PetscCall(VecDestroy(&sp));

  PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */

  /* TSForwardSetInitialSensitivities keeps its own reference; drop ours */
  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1037 
1038 /*@
1039   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context
1040 
1041   Logically Collective
1042 
1043   Input Parameter:
1044 . ts - the `TS` context obtained from `TSCreate()`
1045 
1046   Level: intermediate
1047 
1048 .seealso: [](ch_ts), `TSAdjointSetForward()`
1049 @*/
TSAdjointResetForward(TS ts)1050 PetscErrorCode TSAdjointResetForward(TS ts)
1051 {
1052   PetscFunctionBegin;
1053   ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
1054   PetscCall(TSForwardReset(ts));
1055   PetscFunctionReturn(PETSC_SUCCESS);
1056 }
1057 
1058 /*@
1059   TSAdjointSetUp - Sets up the internal data structures for the later use
1060   of an adjoint solver
1061 
1062   Collective
1063 
1064   Input Parameter:
1065 . ts - the `TS` context obtained from `TSCreate()`
1066 
1067   Level: advanced
1068 
1069 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
1070 @*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory tj;    /* trajectory used to replay the forward solve backwards */
  PetscBool    match; /* PETSC_TRUE when the trajectory is of type Basic */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* setup is idempotent */
  if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first");
  /* parameter sensitivities require a parameter Jacobian (either implicit or RHS form) */
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  PetscCall(TSGetTrajectory(ts, &tj));
  PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match));
  if (match) {
    PetscBool solution_only;
    /* Basic trajectory must also store stage values for the adjoint to work */
    PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only));
    PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    /* work columns for dr/du (and dr/dp when parameter sensitivities are requested) */
    PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col));
    if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col));
  }

  /* let the TS implementation allocate its own adjoint data, if it defines the hook */
  PetscTryTypeMethod(ts, adjointsetup);
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1099 
1100 /*@
1101   TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s.
1102 
1103   Collective
1104 
1105   Input Parameter:
1106 . ts - the `TS` context obtained from `TSCreate()`
1107 
1108   Level: beginner
1109 
1110 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSDestroy()`
1111 @*/
TSAdjointReset(TS ts)1112 PetscErrorCode TSAdjointReset(TS ts)
1113 {
1114   PetscFunctionBegin;
1115   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1116   PetscTryTypeMethod(ts, adjointreset);
1117   if (ts->quadraturets) { /* if there is integral in the cost function */
1118     PetscCall(VecDestroy(&ts->vec_drdu_col));
1119     if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col));
1120   }
1121   ts->vecs_sensi         = NULL;
1122   ts->vecs_sensip        = NULL;
1123   ts->vecs_sensi2        = NULL;
1124   ts->vecs_sensi2p       = NULL;
1125   ts->vec_dir            = NULL;
1126   ts->adjointsetupcalled = PETSC_FALSE;
1127   PetscFunctionReturn(PETSC_SUCCESS);
1128 }
1129 
1130 /*@
1131   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time
1132 
1133   Logically Collective
1134 
1135   Input Parameters:
1136 + ts    - the `TS` context obtained from `TSCreate()`
1137 - steps - number of steps to use
1138 
1139   Level: intermediate
1140 
1141   Notes:
1142   Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. One can call this
1143   so as to integrate back to less than the original timestep
1144 
1145 .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
1146 @*/
TSAdjointSetSteps(TS ts,PetscInt steps)1147 PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
1148 {
1149   PetscFunctionBegin;
1150   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1151   PetscValidLogicalCollectiveInt(ts, steps, 2);
1152   PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
1153   PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
1154   ts->adjoint_max_steps = steps;
1155   PetscFunctionReturn(PETSC_SUCCESS);
1156 }
1157 
1158 // PetscClangLinter pragma disable: -fdoc-*
1159 /*@C
1160   TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`
1161 
1162   Level: deprecated
1163 @*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), PetscCtx ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  /* deprecated path: stores into Jacp rather than Jacprhs as TSSetRHSJacobianP() does */
  ts->rhsjacobianpctx = ctx;
  ts->rhsjacobianp    = func;
  if (Amat) {
    /* take our own reference before releasing any previously held matrix */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1179 
1180 // PetscClangLinter pragma disable: -fdoc-*
1181 /*@C
1182   TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`
1183 
1184   Level: deprecated
1185 @*/
TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat)1186 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
1187 {
1188   PetscFunctionBegin;
1189   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1190   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
1191   PetscValidHeaderSpecific(Amat, MAT_CLASSID, 4);
1192 
1193   PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
1194   PetscFunctionReturn(PETSC_SUCCESS);
1195 }
1196 
1197 // PetscClangLinter pragma disable: -fdoc-*
1198 /*@
1199   TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`
1200 
1201   Level: deprecated
1202 @*/
TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec * DRDU)1203 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
1204 {
1205   PetscFunctionBegin;
1206   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1207   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
1208 
1209   PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
1210   PetscFunctionReturn(PETSC_SUCCESS);
1211 }
1212 
1213 // PetscClangLinter pragma disable: -fdoc-*
1214 /*@
1215   TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`
1216 
1217   Level: deprecated
1218 @*/
TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec * DRDP)1219 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
1220 {
1221   PetscFunctionBegin;
1222   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1223   PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
1224 
1225   PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
1226   PetscFunctionReturn(PETSC_SUCCESS);
1227 }
1228 
1229 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
1230 /*@C
1231   TSAdjointMonitorSensi - monitors the first lambda sensitivity
1232 
1233   Level: intermediate
1234 
1235 .seealso: [](ch_ts), `TSAdjointMonitorSet()`
1236 @*/
TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec * lambda,Vec * mu,PetscViewerAndFormat * vf)1237 static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
1238 {
1239   PetscViewer viewer = vf->viewer;
1240 
1241   PetscFunctionBegin;
1242   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
1243   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1244   PetscCall(VecView(lambda[0], viewer));
1245   PetscCall(PetscViewerPopFormat(viewer));
1246   PetscFunctionReturn(PETSC_SUCCESS);
1247 }
1248 
1249 /*@C
1250   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
1251 
1252   Collective
1253 
1254   Input Parameters:
1255 + ts           - `TS` object you wish to monitor
1256 . name         - the monitor type one is seeking
1257 . help         - message indicating what monitoring is done
1258 . manual       - manual page for the monitor
1259 . monitor      - the monitor function, its context must be a `PetscViewerAndFormat`
1260 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects
1261 
1262   Level: developer
1263 
1264 .seealso: [](ch_ts), `PetscOptionsCreateViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
1265           `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
1266           `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
1267           `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
1268           `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
1269           `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
1270           `PetscOptionsFList()`, `PetscOptionsEList()`, `PetscViewerAndFormat`
1271 @*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
{
  PetscViewer           viewer;
  PetscViewerFormat     format;
  PetscBool             set;
  PetscViewerAndFormat *vf;

  PetscFunctionBegin;
  /* probe the options database for the requested viewer; nothing to do if absent */
  PetscCall(PetscOptionsCreateViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &set));
  if (!set) PetscFunctionReturn(PETSC_SUCCESS);

  /* the viewer-and-format pair owns the viewer from here on */
  PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
  PetscCall(PetscViewerDestroy(&viewer));
  if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
  PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode (*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscCtx))monitor, vf, (PetscCtxDestroyFn *)PetscViewerAndFormatDestroy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1289 
1290 /*@C
1291   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1292   timestep to display the iteration's  progress.
1293 
1294   Logically Collective
1295 
1296   Input Parameters:
1297 + ts              - the `TS` context obtained from `TSCreate()`
1298 . adjointmonitor  - monitoring routine
1299 . adjointmctx     - [optional] user-defined context for private data for the monitor routine
1300                     (use `NULL` if no context is desired)
1301 - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`), see `PetscCtxDestroyFn` for its calling sequence
1302 
1303   Calling sequence of `adjointmonitor`:
1304 + ts          - the `TS` context
1305 . steps       - iteration number (after the final time step the monitor routine is called with
1306                 a step of -1, this is at the final time which may have been interpolated to)
1307 . time        - current time
1308 . u           - current iterate
. numcost     - number of cost functions
1310 . lambda      - sensitivities to initial conditions
1311 . mu          - sensitivities to parameters
1312 - adjointmctx - [optional] adjoint monitoring context
1313 
1314   Level: intermediate
1315 
1316   Note:
1317   This routine adds an additional monitor to the list of monitors that
1318   already has been loaded.
1319 
1320   Fortran Notes:
1321   Only a single monitor function can be set for each `TS` object
1322 
1323 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()`, `PetscCtxDestroyFn`
1324 @*/
PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, PetscCtx adjointmctx), PetscCtx adjointmctx, PetscCtxDestroyFn *adjointmdestroy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Skip installation when an identical monitor (same function/context/destroy) is set.
     Bug fix: scan the ADJOINT monitor list bounded by numberadjointmonitors; the previous
     bound (numbermonitors, the forward-monitor count) could read uninitialized entries of
     the adjoint arrays or miss duplicates. */
  for (PetscInt i = 0; i < ts->numberadjointmonitors; i++) {
    PetscBool identical;

    PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))(PetscVoidFn *)adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode (*)(void))(PetscVoidFn *)ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical));
    if (identical) PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set");
  /* append to the list of adjoint monitors called after every adjoint step */
  ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
  ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
  ts->adjointmonitorcontext[ts->numberadjointmonitors++] = adjointmctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1341 
1342 /*@C
1343   TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.
1344 
1345   Logically Collective
1346 
1347   Input Parameter:
1348 . ts - the `TS` context obtained from `TSCreate()`
1349 
1350   Notes:
1351   There is no way to remove a single, specific monitor.
1352 
1353   Level: intermediate
1354 
1355 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1356 @*/
TSAdjointMonitorCancel(TS ts)1357 PetscErrorCode TSAdjointMonitorCancel(TS ts)
1358 {
1359   PetscInt i;
1360 
1361   PetscFunctionBegin;
1362   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1363   for (i = 0; i < ts->numberadjointmonitors; i++) {
1364     if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1365   }
1366   ts->numberadjointmonitors = 0;
1367   PetscFunctionReturn(PETSC_SUCCESS);
1368 }
1369 
1370 /*@C
1371   TSAdjointMonitorDefault - the default monitor of adjoint computations
1372 
1373   Input Parameters:
1374 + ts      - the `TS` context
1375 . step    - iteration number (after the final time step the monitor routine is called with a
1376 step of -1, this is at the final time which may have been interpolated to)
1377 . time    - current time
1378 . v       - current iterate
. numcost - number of cost functions
1380 . lambda  - sensitivities to initial conditions
1381 . mu      - sensitivities to parameters
1382 - vf      - the viewer and format
1383 
1384   Level: intermediate
1385 
1386 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
1387 @*/
TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal time,Vec v,PetscInt numcost,Vec lambda[],Vec mu[],PetscViewerAndFormat * vf)1388 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec lambda[], Vec mu[], PetscViewerAndFormat *vf)
1389 {
1390   PetscViewer viewer = vf->viewer;
1391 
1392   PetscFunctionBegin;
1393   (void)v;
1394   (void)numcost;
1395   (void)lambda;
1396   (void)mu;
1397   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
1398   PetscCall(PetscViewerPushFormat(viewer, vf->format));
1399   PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
1400   PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? " (r)\n" : "\n"));
1401   PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
1402   PetscCall(PetscViewerPopFormat(viewer));
1403   PetscFunctionReturn(PETSC_SUCCESS);
1404 }
1405 
1406 /*@C
1407   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
1408   `VecView()` for the sensitivities to initial states at each timestep
1409 
1410   Collective
1411 
1412   Input Parameters:
1413 + ts      - the `TS` context
1414 . step    - current time-step
1415 . ptime   - current time
1416 . u       - current state
1417 . numcost - number of cost functions
1418 . lambda  - sensitivities to initial conditions
1419 . mu      - sensitivities to parameters
1420 - dummy   - either a viewer or `NULL`
1421 
1422   Level: intermediate
1423 
1424 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
1425 @*/
TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec lambda[],Vec mu[],void * dummy)1426 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[], void *dummy)
1427 {
1428   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1429   PetscDraw        draw;
1430   PetscReal        xl, yl, xr, yr, h;
1431   char             time[32];
1432 
1433   PetscFunctionBegin;
1434   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS);
1435 
1436   PetscCall(VecView(lambda[0], ictx->viewer));
1437   PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw));
1438   PetscCall(PetscSNPrintf(time, 32, "Timestep %" PetscInt_FMT " Time %g", step, (double)ptime));
1439   PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
1440   h = yl + .95 * (yr - yl);
1441   PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time));
1442   PetscCall(PetscDrawFlush(draw));
1443   PetscFunctionReturn(PETSC_SUCCESS);
1444 }
1445 
1446 /*@C
1447   TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database.
1448 
1449   Collective
1450 
1451   Input Parameters:
1452 + ts                 - the `TS` context
1453 - PetscOptionsObject - the options context
1454 
1455   Options Database Keys:
1456 + -ts_adjoint_solve <yes,no>     - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`)
1457 . -ts_adjoint_monitor            - print information at each adjoint time step
1458 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically
1459 
1460   Level: developer
1461 
1462   Note:
1463   This is not normally called directly by users
1464 
1465 .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
1466 @*/
PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems PetscOptionsObject)
{
  PetscBool tflg, opt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options");
  /* Seed -ts_adjoint_solve with the current flag so repeated calls are idempotent */
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt));
  if (opt) {
    /* The adjoint solve replays the forward run, so the trajectory must be saved (see option docs above) */
    PetscCall(TSSetSaveTrajectory(ts));
    ts->adjoint_solve = tflg;
  }
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL));
  PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL));
  opt = PETSC_FALSE;
  PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1;

    /* Query the same option again, this time for an optional integer frequency value */
    PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL));
    PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx));
    /* Ownership of ctx passes to the monitor; TSMonitorDrawCtxDestroy runs when monitors are cancelled */
    PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscCtxDestroyFn *)TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1494 
1495 /*@
1496   TSAdjointStep - Steps one time step backward in the adjoint run
1497 
1498   Collective
1499 
1500   Input Parameter:
1501 . ts - the `TS` context obtained from `TSCreate()`
1502 
1503   Level: intermediate
1504 
1505 .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
1506 @*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dm is not used below; TSGetDM() is presumably called for its side effects -- TODO confirm */
  PetscCall(TSGetDM(ts, &dm));
  PetscCall(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason     = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
  PetscUseTypeMethod(ts, adjointstep);
  PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    /* A negative reason means the implementation-level step failed; optionally escalate to a hard error */
    PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* End the backward sweep once all requested adjoint steps have been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1531 
1532 /*@
  TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

  Collective

1538   Input Parameter:
1539 . ts - the `TS` context obtained from `TSCreate()`
1540 
1541   Options Database Key:
1542 . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values
1543 
1544   Level: intermediate
1545 
1546   Notes:
1547   This must be called after a call to `TSSolve()` that solves the forward problem
1548 
1549   By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time
1550 
1551 .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
1552 @*/
PetscErrorCode TSAdjointSolve(TS ts)
{
  /* Register the TSAdjoint citation only once per program run */
  static PetscBool cite = PETSC_FALSE;
#if defined(TSADJOINT_STAGE)
  PetscLogStage adjoint_stage;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
                                   "  title         = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
                                   "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
                                   "  journal       = {SIAM Journal on Scientific Computing},\n"
                                   "  volume        = {44},\n"
                                   "  number        = {1},\n"
                                   "  pages         = {C1-C24},\n"
                                   "  doi           = {10.1137/21M140078X},\n"
                                   "  year          = {2022}\n}\n",
                                   &cite));
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage));
  PetscCall(PetscLogStagePush(adjoint_stage));
#endif
  PetscCall(TSAdjointSetUp(ts));

  /* reset time step and iteration counters */
  ts->adjoint_steps     = 0;
  ts->ksp_its           = 0;
  ts->snes_its          = 0;
  ts->num_snes_failures = 0;
  ts->reject            = 0;
  ts->reason            = TS_CONVERGED_ITERATING;

  /* By default step all the way back over the steps taken by the forward solve */
  if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
  if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

  /* Backward sweep: restore each forward state from the trajectory, then take one adjoint step */
  while (!ts->reason) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
    PetscCall(TSAdjointEventHandler(ts));
    PetscCall(TSAdjointStep(ts));
    /* Accumulate the cost integral in the backward run unless it was integrated forward */
    if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts));
  }
  /* If the sweep reached the initial time, also report the sensitivities at that time */
  if (!ts->steps) {
    PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime));
    PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip));
  }
  ts->solvetime = ts->ptime;
  PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view"));
  PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution"));
  ts->adjoint_max_steps = 0; /* reset so a later TSAdjointSolve() recomputes its default */
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStagePop());
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
1609 
1610 /*@C
1611   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()`
1612 
1613   Collective
1614 
1615   Input Parameters:
1616 + ts      - time stepping context obtained from `TSCreate()`
1617 . step    - step number that has just completed
1618 . ptime   - model time of the state
1619 . u       - state at the current model time
1620 . numcost - number of cost functions (dimension of lambda  or mu)
1621 . lambda  - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1622 - mu      - vectors containing the gradients of the cost functions with respect to the problem parameters
1623 
1624   Level: developer
1625 
1626   Note:
1627   `TSAdjointMonitor()` is typically used automatically within the time stepping implementations.
1628   Users would almost never call this routine directly.
1629 
1630 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
1631 @*/
TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec lambda[],Vec mu[])1632 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec lambda[], Vec mu[])
1633 {
1634   PetscInt i, n = ts->numberadjointmonitors;
1635 
1636   PetscFunctionBegin;
1637   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1638   PetscValidHeaderSpecific(u, VEC_CLASSID, 4);
1639   PetscCall(VecLockReadPush(u));
1640   for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
1641   PetscCall(VecLockReadPop(u));
1642   PetscFunctionReturn(PETSC_SUCCESS);
1643 }
1644 
1645 /*@
1646   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.
1647 
1648   Collective
1649 
1650   Input Parameter:
1651 . ts - time stepping context
1652 
1653   Level: advanced
1654 
1655   Notes:
1656   This function cannot be called until `TSAdjointStep()` has been completed.
1657 
1658 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
1659  @*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Dispatch to the implementation; raises an error if the TS type provides no adjointintegral method */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1667 
1668 /* ------------------ Forward (tangent linear) sensitivity  ------------------*/
1669 
1670 /*@
1671   TSForwardSetUp - Sets up the internal data structures for the later use
1672   of forward sensitivity analysis
1673 
1674   Collective
1675 
1676   Input Parameter:
1677 . ts - the `TS` context obtained from `TSCreate()`
1678 
1679   Level: advanced
1680 
1681 .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
1682 @*/
PetscErrorCode TSForwardSetUp(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Idempotent: second and later calls are no-ops until TSForwardReset() clears the flag */
  if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
  PetscTryTypeMethod(ts, forwardsetup);
  /* Work vector shaped like the solution; presumably holds one column of the sensitivity matrix -- TODO confirm */
  PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
  ts->forwardsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1693 
1694 /*@
1695   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis
1696 
1697   Collective
1698 
1699   Input Parameter:
1700 . ts - the `TS` context obtained from `TSCreate()`
1701 
1702   Level: advanced
1703 
1704 .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
1705 @*/
PetscErrorCode TSForwardReset(TS ts)
{
  TS quadts = ts->quadraturets;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscTryTypeMethod(ts, forwardreset);
  PetscCall(MatDestroy(&ts->mat_sensip));
  /* The quadrature sub-TS has its own sensitivity matrix; release that too */
  if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
  PetscCall(VecDestroy(&ts->vec_sensip_col));
  ts->forward_solve      = PETSC_FALSE;
  ts->forwardsetupcalled = PETSC_FALSE; /* allow TSForwardSetUp() to run again */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1720 
1721 /*@
1722   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.
1723 
1724   Input Parameters:
1725 + ts        - the `TS` context obtained from `TSCreate()`
1726 . numfwdint - number of integrals
1727 - vp        - the vectors containing the gradients for each integral w.r.t. parameters
1728 
1729   Level: deprecated
1730 
1731 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1732 @*/
TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec vp[])1733 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec vp[])
1734 {
1735   PetscFunctionBegin;
1736   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1737   PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
1738   if (!ts->numcost) ts->numcost = numfwdint;
1739 
1740   ts->vecs_integral_sensip = vp;
1741   PetscFunctionReturn(PETSC_SUCCESS);
1742 }
1743 
1744 /*@
1745   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.
1746 
1747   Input Parameter:
1748 . ts - the `TS` context obtained from `TSCreate()`
1749 
1750   Output Parameters:
1751 + numfwdint - number of integrals
1752 - vp        - the vectors containing the gradients for each integral w.r.t. parameters
1753 
1754   Level: deprecated
1755 
1756 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()`
1757 @*/
TSForwardGetIntegralGradients(TS ts,PetscInt * numfwdint,Vec * vp[])1758 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec *vp[])
1759 {
1760   PetscFunctionBegin;
1761   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1762   PetscAssertPointer(vp, 3);
1763   if (numfwdint) *numfwdint = ts->numcost;
1764   if (vp) *vp = ts->vecs_integral_sensip;
1765   PetscFunctionReturn(PETSC_SUCCESS);
1766 }
1767 
1768 /*@
1769   TSForwardStep - Compute the forward sensitivity for one time step.
1770 
1771   Collective
1772 
1773   Input Parameter:
1774 . ts - time stepping context
1775 
1776   Level: advanced
1777 
1778   Notes:
1779   This function cannot be called until `TSStep()` has been completed.
1780 
1781 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
1782 @*/
TSForwardStep(TS ts)1783 PetscErrorCode TSForwardStep(TS ts)
1784 {
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1787   PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0));
1788   PetscUseTypeMethod(ts, forwardstep);
1789   PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0));
1790   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]);
1791   PetscFunctionReturn(PETSC_SUCCESS);
1792 }
1793 
1794 /*@
1795   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.
1796 
1797   Logically Collective
1798 
1799   Input Parameters:
1800 + ts   - the `TS` context obtained from `TSCreate()`
1801 . nump - number of parameters
1802 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1803 
1804   Level: beginner
1805 
1806   Notes:
1807   Use `PETSC_DETERMINE` to use the number of columns of `Smat` for `nump`
1808 
1809   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1810   This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically.
1811   You must call this function before `TSSolve()`.
1812   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.
1813 
1814 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1815 @*/
TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat)1816 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
1817 {
1818   PetscFunctionBegin;
1819   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1820   PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3);
1821   ts->forward_solve = PETSC_TRUE;
1822   if (nump == PETSC_DEFAULT || nump == PETSC_DETERMINE) PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
1823   else ts->num_parameters = nump;
1824   PetscCall(PetscObjectReference((PetscObject)Smat));
1825   PetscCall(MatDestroy(&ts->mat_sensip));
1826   ts->mat_sensip = Smat;
1827   PetscFunctionReturn(PETSC_SUCCESS);
1828 }
1829 
1830 /*@
1831   TSForwardGetSensitivities - Returns the trajectory sensitivities
1832 
1833   Not Collective, but Smat returned is parallel if ts is parallel
1834 
1835   Output Parameters:
1836 + ts   - the `TS` context obtained from `TSCreate()`
1837 . nump - number of parameters
1838 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1839 
1840   Level: intermediate
1841 
1842 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
1843 @*/
TSForwardGetSensitivities(TS ts,PetscInt * nump,Mat * Smat)1844 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
1845 {
1846   PetscFunctionBegin;
1847   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1848   if (nump) *nump = ts->num_parameters;
1849   if (Smat) *Smat = ts->mat_sensip;
1850   PetscFunctionReturn(PETSC_SUCCESS);
1851 }
1852 
1853 /*@
1854   TSForwardCostIntegral - Evaluate the cost integral in the forward run.
1855 
1856   Collective
1857 
1858   Input Parameter:
1859 . ts - time stepping context
1860 
1861   Level: advanced
1862 
1863   Note:
1864   This function cannot be called until `TSStep()` has been completed.
1865 
1866 .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
1867 @*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* Dispatch to the implementation; raises an error if the TS type provides no forwardintegral method */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1875 
1876 /*@
1877   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities
1878 
1879   Collective
1880 
1881   Input Parameters:
1882 + ts   - the `TS` context obtained from `TSCreate()`
1883 - didp - parametric sensitivities of the initial condition
1884 
1885   Level: intermediate
1886 
1887   Notes:
1888   `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
1889   This function is used to set initial values for tangent linear variables.
1890 
1891 .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
1892 @*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(didp, MAT_CLASSID, 2);
  /* If no sensitivity matrix is set yet, adopt didp; the parameter count is deduced from its columns.
     If one was already set, didp is silently ignored. */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DETERMINE, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1901 
1902 /*@
1903   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages
1904 
1905   Input Parameter:
1906 . ts - the `TS` context obtained from `TSCreate()`
1907 
1908   Output Parameters:
1909 + ns - number of stages
1910 - S  - tangent linear sensitivities at the intermediate stages
1911 
1912   Level: advanced
1913 
1914 .seealso: `TS`
1915 @*/
TSForwardGetStages(TS ts,PetscInt * ns,Mat ** S)1916 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S)
1917 {
1918   PetscFunctionBegin;
1919   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1920 
1921   if (!ts->ops->getstages) *S = NULL;
1922   else PetscUseTypeMethod(ts, forwardgetstages, ns, S);
1923   PetscFunctionReturn(PETSC_SUCCESS);
1924 }
1925 
1926 /*@
1927   TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time
1928 
1929   Input Parameters:
1930 + ts  - the `TS` context obtained from `TSCreate()`
1931 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1932 
1933   Output Parameter:
1934 . quadts - the child `TS` context
1935 
1936   Level: intermediate
1937 
1938 .seealso: [](ch_ts), `TSGetQuadratureTS()`
1939 @*/
TSCreateQuadratureTS(TS ts,PetscBool fwd,TS * quadts)1940 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts)
1941 {
1942   char prefix[128];
1943 
1944   PetscFunctionBegin;
1945   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1946   PetscAssertPointer(quadts, 3);
1947   PetscCall(TSDestroy(&ts->quadraturets));
1948   PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets));
1949   PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1));
1950   PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1951   PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
1952   *quadts = ts->quadraturets;
1953 
1954   if (ts->numcost) {
1955     PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
1956   } else {
1957     PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
1958   }
1959   ts->costintegralfwd = fwd;
1960   PetscFunctionReturn(PETSC_SUCCESS);
1961 }
1962 
1963 /*@
1964   TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time
1965 
1966   Input Parameter:
1967 . ts - the `TS` context obtained from `TSCreate()`
1968 
1969   Output Parameters:
1970 + fwd    - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1971 - quadts - the child `TS` context
1972 
1973   Level: intermediate
1974 
1975 .seealso: [](ch_ts), `TSCreateQuadratureTS()`
1976 @*/
TSGetQuadratureTS(TS ts,PetscBool * fwd,TS * quadts)1977 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
1978 {
1979   PetscFunctionBegin;
1980   PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
1981   if (fwd) *fwd = ts->costintegralfwd;
1982   if (quadts) *quadts = ts->quadraturets;
1983   PetscFunctionReturn(PETSC_SUCCESS);
1984 }
1985 
1986 /*@
1987   TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`
1988 
1989   Collective
1990 
1991   Input Parameters:
1992 + ts - the `TS` context obtained from `TSCreate()`
1993 - x  - state vector
1994 
1995   Output Parameters:
1996 + J    - Jacobian matrix
1997 - Jpre - matrix used to compute the preconditioner for `J` (may be same as `J`)
1998 
1999   Level: developer
2000 
2001   Note:
2002   Uses finite differencing when `TS` Jacobian is not available.
2003 
2004 .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
2005 @*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes                                          = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* Make x the SNES solution so the coloring machinery differentiates about the right point */
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  /* Compute J/Jpre at x, via FD coloring if that was selected above */
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2029