xref: /petsc/src/ts/interface/sensitivity/tssen.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 #include <petsc/private/tsimpl.h>        /*I "petscts.h"  I*/
2 #include <petscdraw.h>
3 
4 PetscLogEvent TS_AdjointStep,TS_ForwardStep,TS_JacobianPEval;
5 
6 /* #define TSADJOINT_STAGE */
7 
8 /* ------------------------ Sensitivity Context ---------------------------*/
9 
10 /*@C
11   TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.
12 
13   Logically Collective on TS
14 
15   Input Parameters:
16 + ts - TS context obtained from TSCreate()
17 . Amat - JacobianP matrix
18 . func - function
19 - ctx - [optional] user-defined function context
20 
21   Calling sequence of func:
22 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx);
23 +   t - current timestep
24 .   U - input vector (current ODE solution)
25 .   A - output matrix
26 -   ctx - [optional] user-defined function context
27 
28   Level: intermediate
29 
30   Notes:
31     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
32 
33 .seealso: TSGetRHSJacobianP()
34 @*/
35 PetscErrorCode TSSetRHSJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx)
36 {
37   PetscFunctionBegin;
38   PetscValidHeaderSpecific(ts, TS_CLASSID,1);
39   PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
40 
41   ts->rhsjacobianp    = func;
42   ts->rhsjacobianpctx = ctx;
43   if (Amat) {
44     CHKERRQ(PetscObjectReference((PetscObject)Amat));
45     CHKERRQ(MatDestroy(&ts->Jacprhs));
46     ts->Jacprhs = Amat;
47   }
48   PetscFunctionReturn(0);
49 }
50 
51 /*@C
52   TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.
53 
54   Logically Collective on TS
55 
56   Input Parameter:
57 . ts - TS context obtained from TSCreate()
58 
59   Output Parameters:
60 + Amat - JacobianP matrix
61 . func - function
62 - ctx - [optional] user-defined function context
63 
64   Calling sequence of func:
65 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx);
66 +   t - current timestep
67 .   U - input vector (current ODE solution)
68 .   A - output matrix
69 -   ctx - [optional] user-defined function context
70 
71   Level: intermediate
72 
73   Notes:
74     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
75 
76 .seealso: TSSetRHSJacobianP()
77 @*/
78 PetscErrorCode TSGetRHSJacobianP(TS ts,Mat *Amat,PetscErrorCode (**func)(TS,PetscReal,Vec,Mat,void*),void **ctx)
79 {
80   PetscFunctionBegin;
81   if (func) *func = ts->rhsjacobianp;
82   if (ctx) *ctx  = ts->rhsjacobianpctx;
83   if (Amat) *Amat = ts->Jacprhs;
84   PetscFunctionReturn(0);
85 }
86 
87 /*@C
88   TSComputeRHSJacobianP - Runs the user-defined JacobianP function.
89 
90   Collective on TS
91 
92   Input Parameters:
93 . ts   - The TS context obtained from TSCreate()
94 
95   Level: developer
96 
97 .seealso: TSSetRHSJacobianP()
98 @*/
99 PetscErrorCode TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat)
100 {
101   PetscFunctionBegin;
102   if (!Amat) PetscFunctionReturn(0);
103   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
104   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
105 
106   PetscStackPush("TS user JacobianP function for sensitivity analysis");
107   CHKERRQ((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx));
108   PetscStackPop;
109   PetscFunctionReturn(0);
110 }
111 
112 /*@C
113   TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.
114 
115   Logically Collective on TS
116 
117   Input Parameters:
118 + ts - TS context obtained from TSCreate()
119 . Amat - JacobianP matrix
120 . func - function
121 - ctx - [optional] user-defined function context
122 
123   Calling sequence of func:
124 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx);
125 +   t - current timestep
126 .   U - input vector (current ODE solution)
127 .   Udot - time derivative of state vector
128 .   shift - shift to apply, see note below
129 .   A - output matrix
130 -   ctx - [optional] user-defined function context
131 
132   Level: intermediate
133 
134   Notes:
135     Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p
136 
137 .seealso:
138 @*/
139 PetscErrorCode TSSetIJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Vec,PetscReal,Mat,void*),void *ctx)
140 {
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(ts, TS_CLASSID,1);
143   PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);
144 
145   ts->ijacobianp    = func;
146   ts->ijacobianpctx = ctx;
147   if (Amat) {
148     CHKERRQ(PetscObjectReference((PetscObject)Amat));
149     CHKERRQ(MatDestroy(&ts->Jacp));
150     ts->Jacp = Amat;
151   }
152   PetscFunctionReturn(0);
153 }
154 
/*@C
  TSComputeIJacobianP - Runs the user-defined IJacobianP function and combines it with the
  (negated) RHS JacobianP to form the total parameter Jacobian used by the adjoint/forward solvers.

  Collective on TS

  Input Parameters:
+ ts - the TS context
. t - current timestep
. U - state vector
. Udot - time derivative of state vector
. shift - shift to apply (passed through to the user IJacobianP callback)
- imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate

  Output Parameters:
. Amat - JacobianP matrix; if NULL this routine returns immediately without doing anything

  Level: developer

.seealso: TSSetIJacobianP()
@*/
PetscErrorCode TSComputeIJacobianP(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat Amat,PetscBool imex)
{
  PetscFunctionBegin;
  if (!Amat) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidHeaderSpecific(Udot,VEC_CLASSID,4);

  CHKERRQ(PetscLogEventBegin(TS_JacobianPEval,ts,U,Amat,0));
  /* evaluate the user IJacobianP callback, if one was registered */
  if (ts->ijacobianp) {
    PetscStackPush("TS user JacobianP function for sensitivity analysis");
    CHKERRQ((*ts->ijacobianp)(ts,t,U,Udot,shift,Amat,ts->ijacobianpctx));
    PetscStackPop;
  }
  if (imex) {
    if (!ts->ijacobianp) {  /* system was written as Udot = G(t,U) */
      /* no implicit parameter Jacobian: zero Amat, and make sure it ends up assembled */
      PetscBool assembled;
      CHKERRQ(MatZeroEntries(Amat));
      CHKERRQ(MatAssembled(Amat,&assembled));
      if (!assembled) {
        CHKERRQ(MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY));
        CHKERRQ(MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* fully implicit treatment: fold the RHS JacobianP into Amat with a sign flip,
       since F(Udot,U,t) = G(U,P,t) puts G on the opposite side of the equation */
    if (ts->rhsjacobianp) {
      CHKERRQ(TSComputeRHSJacobianP(ts,t,U,ts->Jacprhs));
    }
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      CHKERRQ(MatScale(Amat,-1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        CHKERRQ(MatZeroEntries(Amat));
      }
      CHKERRQ(MatAXPY(Amat,-1,ts->Jacprhs,axpy));
    }
  }
  CHKERRQ(PetscLogEventEnd(TS_JacobianPEval,ts,U,Amat,0));
  PetscFunctionReturn(0);
}
216 
217 /*@C
218     TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions
219 
220     Logically Collective on TS
221 
222     Input Parameters:
223 +   ts - the TS context obtained from TSCreate()
224 .   numcost - number of gradients to be computed, this is the number of cost functions
225 .   costintegral - vector that stores the integral values
226 .   rf - routine for evaluating the integrand function
227 .   drduf - function that computes the gradients of the r's with respect to u
228 .   drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL)
229 .   fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
230 -   ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL)
231 
232     Calling sequence of rf:
233 $   PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx);
234 
235     Calling sequence of drduf:
236 $   PetscErroCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx);
237 
238     Calling sequence of drdpf:
239 $   PetscErroCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx);
240 
241     Level: deprecated
242 
243     Notes:
244     For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions
245 
246 .seealso: TSSetRHSJacobianP(), TSGetCostGradients(), TSSetCostGradients()
247 @*/
248 PetscErrorCode TSSetCostIntegrand(TS ts,PetscInt numcost,Vec costintegral,PetscErrorCode (*rf)(TS,PetscReal,Vec,Vec,void*),
249                                                           PetscErrorCode (*drduf)(TS,PetscReal,Vec,Vec*,void*),
250                                                           PetscErrorCode (*drdpf)(TS,PetscReal,Vec,Vec*,void*),
251                                                           PetscBool fwd,void *ctx)
252 {
253   PetscFunctionBegin;
254   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
255   if (costintegral) PetscValidHeaderSpecific(costintegral,VEC_CLASSID,3);
256   PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
257   if (!ts->numcost) ts->numcost=numcost;
258 
259   if (costintegral) {
260     CHKERRQ(PetscObjectReference((PetscObject)costintegral));
261     CHKERRQ(VecDestroy(&ts->vec_costintegral));
262     ts->vec_costintegral = costintegral;
263   } else {
264     if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
265       CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,numcost,&ts->vec_costintegral));
266     } else {
267       CHKERRQ(VecSet(ts->vec_costintegral,0.0));
268     }
269   }
270   if (!ts->vec_costintegrand) {
271     CHKERRQ(VecDuplicate(ts->vec_costintegral,&ts->vec_costintegrand));
272   } else {
273     CHKERRQ(VecSet(ts->vec_costintegrand,0.0));
274   }
275   ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
276   ts->costintegrand    = rf;
277   ts->costintegrandctx = ctx;
278   ts->drdufunction     = drduf;
279   ts->drdpfunction     = drdpf;
280   PetscFunctionReturn(0);
281 }
282 
283 /*@C
284    TSGetCostIntegral - Returns the values of the integral term in the cost functions.
285    It is valid to call the routine after a backward run.
286 
287    Not Collective
288 
289    Input Parameter:
290 .  ts - the TS context obtained from TSCreate()
291 
292    Output Parameter:
293 .  v - the vector containing the integrals for each cost function
294 
295    Level: intermediate
296 
297 .seealso: TSSetCostIntegrand()
298 
299 @*/
300 PetscErrorCode  TSGetCostIntegral(TS ts,Vec *v)
301 {
302   TS             quadts;
303 
304   PetscFunctionBegin;
305   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
306   PetscValidPointer(v,2);
307   CHKERRQ(TSGetQuadratureTS(ts,NULL,&quadts));
308   *v = quadts->vec_sol;
309   PetscFunctionReturn(0);
310 }
311 
312 /*@C
313    TSComputeCostIntegrand - Evaluates the integral function in the cost functions.
314 
315    Input Parameters:
316 +  ts - the TS context
317 .  t - current time
318 -  U - state vector, i.e. current solution
319 
320    Output Parameter:
321 .  Q - vector of size numcost to hold the outputs
322 
323    Notes:
324    Most users should not need to explicitly call this routine, as it
325    is used internally within the sensitivity analysis context.
326 
327    Level: deprecated
328 
329 .seealso: TSSetCostIntegrand()
330 @*/
331 PetscErrorCode TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q)
332 {
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
335   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
336   PetscValidHeaderSpecific(Q,VEC_CLASSID,4);
337 
338   CHKERRQ(PetscLogEventBegin(TS_FunctionEval,ts,U,Q,0));
339   if (ts->costintegrand) {
340     PetscStackPush("TS user integrand in the cost function");
341     CHKERRQ((*ts->costintegrand)(ts,t,U,Q,ts->costintegrandctx));
342     PetscStackPop;
343   } else {
344     CHKERRQ(VecZeroEntries(Q));
345   }
346 
347   CHKERRQ(PetscLogEventEnd(TS_FunctionEval,ts,U,Q,0));
348   PetscFunctionReturn(0);
349 }
350 
351 /*@C
352   TSComputeDRDUFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian()
353 
354   Level: deprecated
355 
356 @*/
357 PetscErrorCode TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec *DRDU)
358 {
359   PetscFunctionBegin;
360   if (!DRDU) PetscFunctionReturn(0);
361   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
362   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
363 
364   PetscStackPush("TS user DRDU function for sensitivity analysis");
365   CHKERRQ((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx));
366   PetscStackPop;
367   PetscFunctionReturn(0);
368 }
369 
370 /*@C
371   TSComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP()
372 
373   Level: deprecated
374 
375 @*/
376 PetscErrorCode TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP)
377 {
378   PetscFunctionBegin;
379   if (!DRDP) PetscFunctionReturn(0);
380   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
381   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
382 
383   PetscStackPush("TS user DRDP function for sensitivity analysis");
384   CHKERRQ((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx));
385   PetscStackPop;
386   PetscFunctionReturn(0);
387 }
388 
389 /*@C
390   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.
391 
392   Logically Collective on TS
393 
394   Input Parameters:
395 + ts - TS context obtained from TSCreate()
396 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
397 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU
398 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
399 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP
400 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
401 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU
402 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
403 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP
404 
405   Calling sequence of ihessianproductfunc:
406 $ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
407 +   t - current timestep
408 .   U - input vector (current ODE solution)
409 .   Vl - an array of input vectors to be left-multiplied with the Hessian
410 .   Vr - input vector to be right-multiplied with the Hessian
411 .   VHV - an array of output vectors for vector-Hessian-vector product
412 -   ctx - [optional] user-defined function context
413 
414   Level: intermediate
415 
416   Notes:
417   The first Hessian function and the working array are required.
418   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
419   $ Vl_n^T*F_UP*Vr
420   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
421   Each entry of F_UP corresponds to the derivative
422   $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
423   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
424   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
425   If the cost function is a scalar, there will be only one vector in Vl and VHV.
426 
427 .seealso:
428 @*/
429 PetscErrorCode TSSetIHessianProduct(TS ts,Vec *ihp1,PetscErrorCode (*ihessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
430                                           Vec *ihp2,PetscErrorCode (*ihessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
431                                           Vec *ihp3,PetscErrorCode (*ihessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
432                                           Vec *ihp4,PetscErrorCode (*ihessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
433                                     void *ctx)
434 {
435   PetscFunctionBegin;
436   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
437   PetscValidPointer(ihp1,2);
438 
439   ts->ihessianproductctx = ctx;
440   if (ihp1) ts->vecs_fuu = ihp1;
441   if (ihp2) ts->vecs_fup = ihp2;
442   if (ihp3) ts->vecs_fpu = ihp3;
443   if (ihp4) ts->vecs_fpp = ihp4;
444   ts->ihessianproduct_fuu = ihessianproductfunc1;
445   ts->ihessianproduct_fup = ihessianproductfunc2;
446   ts->ihessianproduct_fpu = ihessianproductfunc3;
447   ts->ihessianproduct_fpp = ihessianproductfunc4;
448   PetscFunctionReturn(0);
449 }
450 
451 /*@C
452   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.
453 
454   Collective on TS
455 
456   Input Parameters:
457 . ts   - The TS context obtained from TSCreate()
458 
459   Notes:
460   TSComputeIHessianProductFunctionUU() is typically used for sensitivity implementation,
461   so most users would not generally call this routine themselves.
462 
463   Level: developer
464 
465 .seealso: TSSetIHessianProduct()
466 @*/
467 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
468 {
469   PetscFunctionBegin;
470   if (!VHV) PetscFunctionReturn(0);
471   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
472   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
473 
474   if (ts->ihessianproduct_fuu) {
475     PetscStackPush("TS user IHessianProduct function 1 for sensitivity analysis");
476     CHKERRQ((*ts->ihessianproduct_fuu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx));
477     PetscStackPop;
478   }
479   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
480   if (ts->rhshessianproduct_guu) {
481     PetscInt nadj;
482     CHKERRQ(TSComputeRHSHessianProductFunctionUU(ts,t,U,Vl,Vr,VHV));
483     for (nadj=0; nadj<ts->numcost; nadj++) {
484       CHKERRQ(VecScale(VHV[nadj],-1));
485     }
486   }
487   PetscFunctionReturn(0);
488 }
489 
490 /*@C
491   TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.
492 
493   Collective on TS
494 
495   Input Parameters:
496 . ts   - The TS context obtained from TSCreate()
497 
498   Notes:
499   TSComputeIHessianProductFunctionUP() is typically used for sensitivity implementation,
500   so most users would not generally call this routine themselves.
501 
502   Level: developer
503 
504 .seealso: TSSetIHessianProduct()
505 @*/
506 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
507 {
508   PetscFunctionBegin;
509   if (!VHV) PetscFunctionReturn(0);
510   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
511   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
512 
513   if (ts->ihessianproduct_fup) {
514     PetscStackPush("TS user IHessianProduct function 2 for sensitivity analysis");
515     CHKERRQ((*ts->ihessianproduct_fup)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx));
516     PetscStackPop;
517   }
518   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
519   if (ts->rhshessianproduct_gup) {
520     PetscInt nadj;
521     CHKERRQ(TSComputeRHSHessianProductFunctionUP(ts,t,U,Vl,Vr,VHV));
522     for (nadj=0; nadj<ts->numcost; nadj++) {
523       CHKERRQ(VecScale(VHV[nadj],-1));
524     }
525   }
526   PetscFunctionReturn(0);
527 }
528 
529 /*@C
530   TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.
531 
532   Collective on TS
533 
534   Input Parameters:
535 . ts   - The TS context obtained from TSCreate()
536 
537   Notes:
538   TSComputeIHessianProductFunctionPU() is typically used for sensitivity implementation,
539   so most users would not generally call this routine themselves.
540 
541   Level: developer
542 
543 .seealso: TSSetIHessianProduct()
544 @*/
545 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
546 {
547   PetscFunctionBegin;
548   if (!VHV) PetscFunctionReturn(0);
549   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
550   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
551 
552   if (ts->ihessianproduct_fpu) {
553     PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis");
554     CHKERRQ((*ts->ihessianproduct_fpu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx));
555     PetscStackPop;
556   }
557   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
558   if (ts->rhshessianproduct_gpu) {
559     PetscInt nadj;
560     CHKERRQ(TSComputeRHSHessianProductFunctionPU(ts,t,U,Vl,Vr,VHV));
561     for (nadj=0; nadj<ts->numcost; nadj++) {
562       CHKERRQ(VecScale(VHV[nadj],-1));
563     }
564   }
565   PetscFunctionReturn(0);
566 }
567 
568 /*@C
569   TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.
570 
571   Collective on TS
572 
573   Input Parameters:
574 . ts   - The TS context obtained from TSCreate()
575 
576   Notes:
577   TSComputeIHessianProductFunctionPP() is typically used for sensitivity implementation,
578   so most users would not generally call this routine themselves.
579 
580   Level: developer
581 
582 .seealso: TSSetIHessianProduct()
583 @*/
584 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
585 {
586   PetscFunctionBegin;
587   if (!VHV) PetscFunctionReturn(0);
588   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
589   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
590 
591   if (ts->ihessianproduct_fpp) {
592     PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis");
593     CHKERRQ((*ts->ihessianproduct_fpp)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx));
594     PetscStackPop;
595   }
596   /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
597   if (ts->rhshessianproduct_gpp) {
598     PetscInt nadj;
599     CHKERRQ(TSComputeRHSHessianProductFunctionPP(ts,t,U,Vl,Vr,VHV));
600     for (nadj=0; nadj<ts->numcost; nadj++) {
601       CHKERRQ(VecScale(VHV[nadj],-1));
602     }
603   }
604   PetscFunctionReturn(0);
605 }
606 
607 /*@C
608   TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable.
609 
610   Logically Collective on TS
611 
612   Input Parameters:
613 + ts - TS context obtained from TSCreate()
614 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU
615 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU
616 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP
617 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP
618 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU
619 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU
620 . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
621 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP
622 
623   Calling sequence of ihessianproductfunc:
624 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
625 +   t - current timestep
626 .   U - input vector (current ODE solution)
627 .   Vl - an array of input vectors to be left-multiplied with the Hessian
628 .   Vr - input vector to be right-multiplied with the Hessian
629 .   VHV - an array of output vectors for vector-Hessian-vector product
630 -   ctx - [optional] user-defined function context
631 
632   Level: intermediate
633 
634   Notes:
635   The first Hessian function and the working array are required.
636   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
637   $ Vl_n^T*G_UP*Vr
638   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
639   Each entry of G_UP corresponds to the derivative
640   $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
641   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
642   $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
643   If the cost function is a scalar, there will be only one vector in Vl and VHV.
644 
645 .seealso:
646 @*/
647 PetscErrorCode TSSetRHSHessianProduct(TS ts,Vec *rhshp1,PetscErrorCode (*rhshessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
648                                           Vec *rhshp2,PetscErrorCode (*rhshessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
649                                           Vec *rhshp3,PetscErrorCode (*rhshessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
650                                           Vec *rhshp4,PetscErrorCode (*rhshessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
651                                     void *ctx)
652 {
653   PetscFunctionBegin;
654   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
655   PetscValidPointer(rhshp1,2);
656 
657   ts->rhshessianproductctx = ctx;
658   if (rhshp1) ts->vecs_guu = rhshp1;
659   if (rhshp2) ts->vecs_gup = rhshp2;
660   if (rhshp3) ts->vecs_gpu = rhshp3;
661   if (rhshp4) ts->vecs_gpp = rhshp4;
662   ts->rhshessianproduct_guu = rhshessianproductfunc1;
663   ts->rhshessianproduct_gup = rhshessianproductfunc2;
664   ts->rhshessianproduct_gpu = rhshessianproductfunc3;
665   ts->rhshessianproduct_gpp = rhshessianproductfunc4;
666   PetscFunctionReturn(0);
667 }
668 
669 /*@C
670   TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.
671 
672   Collective on TS
673 
674   Input Parameters:
675 . ts   - The TS context obtained from TSCreate()
676 
677   Notes:
678   TSComputeRHSHessianProductFunctionUU() is typically used for sensitivity implementation,
679   so most users would not generally call this routine themselves.
680 
681   Level: developer
682 
683 .seealso: TSSetRHSHessianProduct()
684 @*/
685 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
686 {
687   PetscFunctionBegin;
688   if (!VHV) PetscFunctionReturn(0);
689   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
690   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
691 
692   PetscStackPush("TS user RHSHessianProduct function 1 for sensitivity analysis");
693   CHKERRQ((*ts->rhshessianproduct_guu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx));
694   PetscStackPop;
695   PetscFunctionReturn(0);
696 }
697 
698 /*@C
699   TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.
700 
701   Collective on TS
702 
703   Input Parameters:
704 . ts   - The TS context obtained from TSCreate()
705 
706   Notes:
707   TSComputeRHSHessianProductFunctionUP() is typically used for sensitivity implementation,
708   so most users would not generally call this routine themselves.
709 
710   Level: developer
711 
712 .seealso: TSSetRHSHessianProduct()
713 @*/
714 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
715 {
716   PetscFunctionBegin;
717   if (!VHV) PetscFunctionReturn(0);
718   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
719   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
720 
721   PetscStackPush("TS user RHSHessianProduct function 2 for sensitivity analysis");
722   CHKERRQ((*ts->rhshessianproduct_gup)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx));
723   PetscStackPop;
724   PetscFunctionReturn(0);
725 }
726 
727 /*@C
728   TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.
729 
730   Collective on TS
731 
732   Input Parameters:
733 . ts   - The TS context obtained from TSCreate()
734 
735   Notes:
736   TSComputeRHSHessianProductFunctionPU() is typically used for sensitivity implementation,
737   so most users would not generally call this routine themselves.
738 
739   Level: developer
740 
741 .seealso: TSSetRHSHessianProduct()
742 @*/
743 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
744 {
745   PetscFunctionBegin;
746   if (!VHV) PetscFunctionReturn(0);
747   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
748   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
749 
750   PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis");
751   CHKERRQ((*ts->rhshessianproduct_gpu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx));
752   PetscStackPop;
753   PetscFunctionReturn(0);
754 }
755 
756 /*@C
757   TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.
758 
759   Collective on TS
760 
761   Input Parameters:
762 . ts   - The TS context obtained from TSCreate()
763 
764   Notes:
765   TSComputeRHSHessianProductFunctionPP() is typically used for sensitivity implementation,
766   so most users would not generally call this routine themselves.
767 
768   Level: developer
769 
770 .seealso: TSSetRHSHessianProduct()
771 @*/
772 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
773 {
774   PetscFunctionBegin;
775   if (!VHV) PetscFunctionReturn(0);
776   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
777   PetscValidHeaderSpecific(U,VEC_CLASSID,3);
778 
779   PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis");
780   CHKERRQ((*ts->rhshessianproduct_gpp)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx));
781   PetscStackPop;
782   PetscFunctionReturn(0);
783 }
784 
785 /* --------------------------- Adjoint sensitivity ---------------------------*/
786 
787 /*@
788    TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters
789       for use by the TSAdjoint routines.
790 
791    Logically Collective on TS
792 
793    Input Parameters:
794 +  ts - the TS context obtained from TSCreate()
795 .  numcost - number of gradients to be computed, this is the number of cost functions
796 .  lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
797 -  mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
798 
799    Level: beginner
800 
801    Notes:
802     the entries in these vectors must be correctly initialized with the values lamda_i = df/dy|finaltime  mu_i = df/dp|finaltime
803 
804    After TSAdjointSolve() is called the lamba and the mu contain the computed sensitivities
805 
806 .seealso TSGetCostGradients()
807 @*/
PetscErrorCode TSSetCostGradients(TS ts,PetscInt numcost,Vec *lambda,Vec *mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(lambda,3);
  ts->vecs_sensi  = lambda; /* gradients w.r.t. the initial conditions */
  ts->vecs_sensip = mu;     /* gradients w.r.t. the parameters (may be NULL) */
  /* numcost must agree with any count previously set (e.g. by TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost  = numcost;
  PetscFunctionReturn(0);
}
819 
820 /*@
821    TSGetCostGradients - Returns the gradients from the TSAdjointSolve()
822 
823    Not Collective, but Vec returned is parallel if TS is parallel
824 
825    Input Parameter:
826 .  ts - the TS context obtained from TSCreate()
827 
828    Output Parameters:
829 +  numcost - size of returned arrays
830 .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
831 -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters
832 
833    Level: intermediate
834 
835 .seealso: TSSetCostGradients()
836 @*/
837 PetscErrorCode TSGetCostGradients(TS ts,PetscInt *numcost,Vec **lambda,Vec **mu)
838 {
839   PetscFunctionBegin;
840   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
841   if (numcost) *numcost = ts->numcost;
842   if (lambda)  *lambda  = ts->vecs_sensi;
843   if (mu)      *mu      = ts->vecs_sensip;
844   PetscFunctionReturn(0);
845 }
846 
847 /*@
848    TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
849       for use by the TSAdjoint routines.
850 
851    Logically Collective on TS
852 
853    Input Parameters:
854 +  ts - the TS context obtained from TSCreate()
855 .  numcost - number of cost functions
856 .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
857 .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
858 -  dir - the direction vector that is multiplied with the Hessian of the cost functions
859 
860    Level: beginner
861 
862    Notes: Hessian of the cost function is completely different from Hessian of the ODE/DAE system
863 
864    For second-order adjoint, one needs to call this function and then TSAdjointSetForward() before TSSolve().
865 
866    After TSAdjointSolve() is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
867 
868    Passing NULL for lambda2 disables the second-order calculation.
869 .seealso: TSAdjointSetForward()
870 @*/
PetscErrorCode TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec *lambda2,Vec *mu2,Vec dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* numcost must agree with any count previously set (e.g. by TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost       = numcost;
  ts->vecs_sensi2   = lambda2; /* second-order adjoint w.r.t. initial conditions */
  ts->vecs_sensi2p  = mu2;     /* second-order adjoint w.r.t. parameters (may be NULL) */
  ts->vec_dir       = dir;     /* direction for the Hessian-vector product */
  PetscFunctionReturn(0);
}
882 
883 /*@
884    TSGetCostHessianProducts - Returns the gradients from the TSAdjointSolve()
885 
886    Not Collective, but Vec returned is parallel if TS is parallel
887 
888    Input Parameter:
889 .  ts - the TS context obtained from TSCreate()
890 
891    Output Parameters:
892 +  numcost - number of cost functions
893 .  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
894 .  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
895 -  dir - the direction vector that is multiplied with the Hessian of the cost functions
896 
897    Level: intermediate
898 
899 .seealso: TSSetCostHessianProducts()
900 @*/
901 PetscErrorCode TSGetCostHessianProducts(TS ts,PetscInt *numcost,Vec **lambda2,Vec **mu2, Vec *dir)
902 {
903   PetscFunctionBegin;
904   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
905   if (numcost) *numcost = ts->numcost;
906   if (lambda2) *lambda2 = ts->vecs_sensi2;
907   if (mu2)     *mu2     = ts->vecs_sensi2p;
908   if (dir)     *dir     = ts->vec_dir;
909   PetscFunctionReturn(0);
910 }
911 
912 /*@
913   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities
914 
915   Logically Collective on TS
916 
917   Input Parameters:
918 +  ts - the TS context obtained from TSCreate()
919 -  didp - the derivative of initial values w.r.t. parameters
920 
921   Level: intermediate
922 
923   Notes: When computing sensitivities w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. TSAdjoint does not reset the tangent linear solver automatically; TSAdjointResetForward() should be called to reset the tangent linear solver.
924 
925 .seealso: TSSetCostHessianProducts(), TSAdjointResetForward()
926 @*/
PetscErrorCode TSAdjointSetForward(TS ts,Mat didp)
{
  Mat            A;     /* single-column dense matrix holding the initial tangent linear variable */
  Vec            sp;    /* work vector temporarily aliasing the column storage of A */
  PetscScalar    *xarr;
  PetscInt       lsize;

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  CHKERRQ(VecGetLocalSize(ts->vec_sol,&lsize));
  CHKERRQ(MatCreateDense(PetscObjectComm((PetscObject)ts),lsize,PETSC_DECIDE,PETSC_DECIDE,1,NULL,&A));

  /* Wrap the matrix column in a Vec so it can be filled with Vec operations;
     the placed array must be reset and the column restored before A is used */
  CHKERRQ(VecDuplicate(ts->vec_sol,&sp));
  CHKERRQ(MatDenseGetColumn(A,0,&xarr));
  CHKERRQ(VecPlaceArray(sp,xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      CHKERRQ(MatMult(didp,ts->vec_dir,sp));
      CHKERRQ(VecScale(sp,2.));
    } else {
      CHKERRQ(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    CHKERRQ(VecCopy(ts->vec_dir,sp));
  }
  /* release the borrowed column storage before handing A off */
  CHKERRQ(VecResetArray(sp));
  CHKERRQ(MatDenseRestoreColumn(A,&xarr));
  CHKERRQ(VecDestroy(&sp));

  CHKERRQ(TSForwardSetInitialSensitivities(ts,A)); /* if didp is NULL, identity matrix is assumed */

  /* TSForwardSetInitialSensitivities() keeps its own reference; drop ours */
  CHKERRQ(MatDestroy(&A));
  PetscFunctionReturn(0);
}
964 
965 /*@
966   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context
967 
968   Logically Collective on TS
969 
970   Input Parameters:
971 .  ts - the TS context obtained from TSCreate()
972 
973   Level: intermediate
974 
975 .seealso: TSAdjointSetForward()
976 @*/
PetscErrorCode TSAdjointResetForward(TS ts)
{
  PetscFunctionBegin;
  ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
  /* destroy the tangent linear (forward sensitivity) context */
  CHKERRQ(TSForwardReset(ts));
  PetscFunctionReturn(0);
}
984 
985 /*@
986    TSAdjointSetUp - Sets up the internal data structures for the later use
987    of an adjoint solver
988 
989    Collective on TS
990 
991    Input Parameter:
992 .  ts - the TS context obtained from TSCreate()
993 
994    Level: advanced
995 
996 .seealso: TSCreate(), TSAdjointStep(), TSSetCostGradients()
997 @*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory     tj;
  PetscBool        match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->adjointsetupcalled) PetscFunctionReturn(0); /* idempotent */
  PetscCheck(ts->vecs_sensi,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetCostGradients() first");
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  /* The Basic trajectory type must have checkpointed more than just solutions
     for the backward replay to work */
  CHKERRQ(TSGetTrajectory(ts,&tj));
  CHKERRQ(PetscObjectTypeCompare((PetscObject)tj,TSTRAJECTORYBASIC,&match));
  if (match) {
    PetscBool solution_only;
    CHKERRQ(TSTrajectoryGetSolutionOnly(tj,&solution_only));
    PetscCheck(!solution_only,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  CHKERRQ(TSTrajectorySetUseHistory(tj,PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    /* work vectors holding single columns of dR/dU and dR/dP */
    CHKERRQ(VecDuplicate(ts->vecs_sensi[0],&ts->vec_drdu_col));
    if (ts->vecs_sensip) {
      CHKERRQ(VecDuplicate(ts->vecs_sensip[0],&ts->vec_drdp_col));
    }
  }

  /* implementation-specific setup hook */
  if (ts->ops->adjointsetup) {
    CHKERRQ((*ts->ops->adjointsetup)(ts));
  }
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1030 
1031 /*@
1032    TSAdjointReset - Resets a TSAdjoint context and removes any allocated Vecs and Mats.
1033 
1034    Collective on TS
1035 
1036    Input Parameter:
1037 .  ts - the TS context obtained from TSCreate()
1038 
1039    Level: beginner
1040 
1041 .seealso: TSCreate(), TSAdjointSetUp(), TSDestroy()
1042 @*/
PetscErrorCode TSAdjointReset(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* implementation-specific cleanup first */
  if (ts->ops->adjointreset) {
    CHKERRQ((*ts->ops->adjointreset)(ts));
  }
  if (ts->quadraturets) { /* if there is integral in the cost function */
    CHKERRQ(VecDestroy(&ts->vec_drdu_col));
    if (ts->vecs_sensip) {
      CHKERRQ(VecDestroy(&ts->vec_drdp_col));
    }
  }
  /* drop the sensitivity vector arrays; they are user-owned and NOT destroyed here */
  ts->vecs_sensi         = NULL;
  ts->vecs_sensip        = NULL;
  ts->vecs_sensi2        = NULL;
  ts->vecs_sensi2p       = NULL;
  ts->vec_dir            = NULL;
  ts->adjointsetupcalled = PETSC_FALSE; /* allow TSAdjointSetUp() to run again */
  PetscFunctionReturn(0);
}
1064 
1065 /*@
1066    TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time
1067 
1068    Logically Collective on TS
1069 
1070    Input Parameters:
1071 +  ts - the TS context obtained from TSCreate()
1072 -  steps - number of steps to use
1073 
1074    Level: intermediate
1075 
1076    Notes:
1077     Normally one does not call this and TSAdjointSolve() integrates back to the original timestep. One can call this
1078           so as to integrate back to less than the original timestep
1079 
1080 .seealso: TSSetExactFinalTime()
1081 @*/
PetscErrorCode TSAdjointSetSteps(TS ts,PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidLogicalCollectiveInt(ts,steps,2);
  /* cannot step back further than the forward run advanced */
  PetscCheck(steps >= 0,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back a negative number of steps");
  PetscCheck(steps <= ts->steps,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps; /* number of backward steps TSAdjointSolve() will take */
  PetscFunctionReturn(0);
}
1092 
1093 /*@C
1094   TSAdjointSetRHSJacobian - Deprecated, use TSSetRHSJacobianP()
1095 
1096   Level: deprecated
1097 
1098 @*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID,1);
  PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  /* NOTE(review): this deprecated routine stores the matrix in ts->Jacp, whereas
     its replacement TSSetRHSJacobianP() stores it in ts->Jacprhs — confirm this
     difference is intentional for legacy callers */
  if (Amat) {
    /* take a reference before releasing any previously set matrix */
    CHKERRQ(PetscObjectReference((PetscObject)Amat));
    CHKERRQ(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(0);
}
1114 
1115 /*@C
1116   TSAdjointComputeRHSJacobian - Deprecated, use TSComputeRHSJacobianP()
1117 
1118   Level: deprecated
1119 
1120 @*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidPointer(Amat,4);

  /* assumes ts->rhsjacobianp was set (e.g. via TSAdjointSetRHSJacobian());
     it is dereferenced unconditionally — TODO confirm callers guarantee this */
  PetscStackPush("TS user JacobianP function for sensitivity analysis");
  CHKERRQ((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}
1133 
1134 /*@
1135   TSAdjointComputeDRDYFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian()
1136 
1137   Level: deprecated
1138 
1139 @*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec *DRDU)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* assumes ts->drdufunction was registered (deprecated cost-integrand interface);
     it is dereferenced unconditionally — TODO confirm callers guarantee this */
  PetscStackPush("TS user DRDY function for sensitivity analysis");
  CHKERRQ((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}
1151 
1152 /*@
1153   TSAdjointComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP()
1154 
1155   Level: deprecated
1156 
1157 @*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* assumes ts->drdpfunction was registered (deprecated cost-integrand interface);
     it is dereferenced unconditionally — TODO confirm callers guarantee this */
  PetscStackPush("TS user DRDP function for sensitivity analysis");
  CHKERRQ((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}
1169 
1170 /*@C
1171    TSAdjointMonitorSensi - monitors the first lambda sensitivity
1172 
1173    Level: intermediate
1174 
1175 .seealso: TSAdjointMonitorSet()
1176 @*/
1177 PetscErrorCode TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf)
1178 {
1179   PetscViewer    viewer = vf->viewer;
1180 
1181   PetscFunctionBegin;
1182   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8);
1183   CHKERRQ(PetscViewerPushFormat(viewer,vf->format));
1184   CHKERRQ(VecView(lambda[0],viewer));
1185   CHKERRQ(PetscViewerPopFormat(viewer));
1186   PetscFunctionReturn(0);
1187 }
1188 
1189 /*@C
1190    TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user
1191 
1192    Collective on TS
1193 
1194    Input Parameters:
1195 +  ts - TS object you wish to monitor
1196 .  name - the monitor type one is seeking
1197 .  help - message indicating what monitoring is done
1198 .  manual - manual page for the monitor
1199 .  monitor - the monitor function
1200 -  monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the TS or PetscViewer objects
1201 
1202    Level: developer
1203 
1204 .seealso: PetscOptionsGetViewer(), PetscOptionsGetReal(), PetscOptionsHasName(), PetscOptionsGetString(),
1205           PetscOptionsGetIntArray(), PetscOptionsGetRealArray(), PetscOptionsBool()
1206           PetscOptionsInt(), PetscOptionsString(), PetscOptionsReal(), PetscOptionsBool(),
1207           PetscOptionsName(), PetscOptionsBegin(), PetscOptionsEnd(), PetscOptionsHead(),
1208           PetscOptionsStringArray(),PetscOptionsRealArray(), PetscOptionsScalar(),
1209           PetscOptionsBoolGroupBegin(), PetscOptionsBoolGroup(), PetscOptionsBoolGroupEnd(),
1210           PetscOptionsFList(), PetscOptionsEList()
1211 @*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts,const char name[],const char help[], const char manual[],PetscErrorCode (*monitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,PetscViewerAndFormat*),PetscErrorCode (*monitorsetup)(TS,PetscViewerAndFormat*))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  /* query the options database for a viewer associated with 'name' */
  CHKERRQ(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts),((PetscObject) ts)->options,((PetscObject)ts)->prefix,name,&viewer,&format,&flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    CHKERRQ(PetscViewerAndFormatCreate(viewer,format,&vf));
    /* vf now holds the reference obtained above; drop ours */
    CHKERRQ(PetscObjectDereference((PetscObject)viewer));
    if (monitorsetup) {
      CHKERRQ((*monitorsetup)(ts,vf));
    }
    /* vf is freed later through the PetscViewerAndFormatDestroy callback */
    CHKERRQ(TSAdjointMonitorSet(ts,(PetscErrorCode (*)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(0);
}
1231 
1232 /*@C
1233    TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
1234    timestep to display the iteration's  progress.
1235 
1236    Logically Collective on TS
1237 
1238    Input Parameters:
1239 +  ts - the TS context obtained from TSCreate()
1240 .  adjointmonitor - monitoring routine
1241 .  adjointmctx - [optional] user-defined context for private data for the
1242              monitor routine (use NULL if no context is desired)
1243 -  adjointmonitordestroy - [optional] routine that frees monitor context
1244           (may be NULL)
1245 
1246    Calling sequence of monitor:
1247 $    int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx)
1248 
1249 +    ts - the TS context
1250 .    steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
1251                                been interpolated to)
1252 .    time - current time
1253 .    u - current iterate
1254 .    numcost - number of cost functions
1255 .    lambda - sensitivities to initial conditions
1256 .    mu - sensitivities to parameters
1257 -    adjointmctx - [optional] adjoint monitoring context
1258 
1259    Notes:
1260    This routine adds an additional monitor to the list of monitors that
1261    already has been loaded.
1262 
1263    Fortran Notes:
1264     Only a single monitor function can be set for each TS object
1265 
1266    Level: intermediate
1267 
1268 .seealso: TSAdjointMonitorCancel()
1269 @*/
1270 PetscErrorCode TSAdjointMonitorSet(TS ts,PetscErrorCode (*adjointmonitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*),void *adjointmctx,PetscErrorCode (*adjointmdestroy)(void**))
1271 {
1272   PetscInt       i;
1273   PetscBool      identical;
1274 
1275   PetscFunctionBegin;
1276   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1277   for (i=0; i<ts->numbermonitors;i++) {
1278     CHKERRQ(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor,adjointmctx,adjointmdestroy,(PetscErrorCode (*)(void))ts->adjointmonitor[i],ts->adjointmonitorcontext[i],ts->adjointmonitordestroy[i],&identical));
1279     if (identical) PetscFunctionReturn(0);
1280   }
1281   PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many adjoint monitors set");
1282   ts->adjointmonitor[ts->numberadjointmonitors]          = adjointmonitor;
1283   ts->adjointmonitordestroy[ts->numberadjointmonitors]   = adjointmdestroy;
1284   ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void*)adjointmctx;
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 /*@C
1289    TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object.
1290 
1291    Logically Collective on TS
1292 
1293    Input Parameters:
1294 .  ts - the TS context obtained from TSCreate()
1295 
1296    Notes:
1297    There is no way to remove a single, specific monitor.
1298 
1299    Level: intermediate
1300 
1301 .seealso: TSAdjointMonitorSet()
1302 @*/
1303 PetscErrorCode TSAdjointMonitorCancel(TS ts)
1304 {
1305   PetscInt       i;
1306 
1307   PetscFunctionBegin;
1308   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1309   for (i=0; i<ts->numberadjointmonitors; i++) {
1310     if (ts->adjointmonitordestroy[i]) {
1311       CHKERRQ((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
1312     }
1313   }
1314   ts->numberadjointmonitors = 0;
1315   PetscFunctionReturn(0);
1316 }
1317 
1318 /*@C
1319    TSAdjointMonitorDefault - the default monitor of adjoint computations
1320 
1321    Level: intermediate
1322 
1323 .seealso: TSAdjointMonitorSet()
1324 @*/
PetscErrorCode TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf)
{
  PetscViewer    viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8);
  CHKERRQ(PetscViewerPushFormat(viewer,vf->format));
  /* indent to the TS object's tab level so nested solver output lines up */
  CHKERRQ(PetscViewerASCIIAddTab(viewer,((PetscObject)ts)->tablevel));
  /* "(r)" marks a step that was rolled back */
  CHKERRQ(PetscViewerASCIIPrintf(viewer,"%D TS dt %g time %g%s",step,(double)ts->time_step,(double)ptime,ts->steprollback ? " (r)\n" : "\n"));
  CHKERRQ(PetscViewerASCIISubtractTab(viewer,((PetscObject)ts)->tablevel));
  CHKERRQ(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(0);
}
1338 
1339 /*@C
1340    TSAdjointMonitorDrawSensi - Monitors progress of the adjoint TS solvers by calling
1341    VecView() for the sensitivities to initial states at each timestep
1342 
1343    Collective on TS
1344 
1345    Input Parameters:
1346 +  ts - the TS context
1347 .  step - current time-step
1348 .  ptime - current time
1349 .  u - current state
1350 .  numcost - number of cost functions
1351 .  lambda - sensitivities to initial conditions
1352 .  mu - sensitivities to parameters
1353 -  dummy - either a viewer or NULL
1354 
1355    Level: intermediate
1356 
1357 .seealso: TSAdjointMonitorSet(), TSAdjointMonitorDefault(), VecView()
1358 @*/
1359 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda,Vec *mu,void *dummy)
1360 {
1361   TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
1362   PetscDraw        draw;
1363   PetscReal        xl,yl,xr,yr,h;
1364   char             time[32];
1365 
1366   PetscFunctionBegin;
1367   if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0);
1368 
1369   CHKERRQ(VecView(lambda[0],ictx->viewer));
1370   CHKERRQ(PetscViewerDrawGetDraw(ictx->viewer,0,&draw));
1371   CHKERRQ(PetscSNPrintf(time,32,"Timestep %d Time %g",(int)step,(double)ptime));
1372   CHKERRQ(PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr));
1373   h    = yl + .95*(yr - yl);
1374   CHKERRQ(PetscDrawStringCentered(draw,.5*(xl+xr),h,PETSC_DRAW_BLACK,time));
1375   CHKERRQ(PetscDrawFlush(draw));
1376   PetscFunctionReturn(0);
1377 }
1378 
1379 /*
1380    TSAdjointSetFromOptions - Sets various TSAdjoint parameters from user options.
1381 
1382    Collective on TSAdjoint
1383 
1384    Input Parameter:
1385 .  ts - the TS context
1386 
1387    Options Database Keys:
1388 +  -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory)
1389 .  -ts_adjoint_monitor - print information at each adjoint time step
1390 -  -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically
1391 
1392    Level: developer
1393 
1394    Notes:
1395     This is not normally called directly by users
1396 
1397 .seealso: TSSetSaveTrajectory(), TSTrajectorySetUp()
1398 */
PetscErrorCode TSAdjointSetFromOptions(PetscOptionItems *PetscOptionsObject,TS ts)
{
  PetscBool      tflg,opt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,2);
  CHKERRQ(PetscOptionsHead(PetscOptionsObject,"TS Adjoint options"));
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  CHKERRQ(PetscOptionsBool("-ts_adjoint_solve","Solve the adjoint problem immediately after solving the forward problem","",tflg,&tflg,&opt));
  if (opt) {
    /* the adjoint solve needs the forward trajectory to be checkpointed */
    CHKERRQ(TSSetSaveTrajectory(ts));
    ts->adjoint_solve = tflg;
  }
  CHKERRQ(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor","Monitor adjoint timestep size","TSAdjointMonitorDefault",TSAdjointMonitorDefault,NULL));
  CHKERRQ(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor_sensi","Monitor sensitivity in the adjoint computation","TSAdjointMonitorSensi",TSAdjointMonitorSensi,NULL));
  opt  = PETSC_FALSE;
  CHKERRQ(PetscOptionsName("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",&opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1; /* draw every step unless the option supplies a value */

    CHKERRQ(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",howoften,&howoften,NULL));
    CHKERRQ(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&ctx));
    /* ctx ownership passes to the monitor and is freed via TSMonitorDrawCtxDestroy */
    CHKERRQ(TSAdjointMonitorSet(ts,TSAdjointMonitorDrawSensi,ctx,(PetscErrorCode (*)(void**))TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(0);
}
1426 
1427 /*@
1428    TSAdjointStep - Steps one time step backward in the adjoint run
1429 
1430    Collective on TS
1431 
1432    Input Parameter:
1433 .  ts - the TS context obtained from TSCreate()
1434 
1435    Level: intermediate
1436 
1437 .seealso: TSAdjointSetUp(), TSAdjointSolve()
1438 @*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM               dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* NOTE(review): dm is fetched but not used in this function — presumably kept
     for symmetry with TSStep(); confirm before removing */
  CHKERRQ(TSGetDM(ts,&dm));
  CHKERRQ(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCheck(ts->ops->adjointstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSStep has failed because the adjoint of  %s has not been implemented, try other time stepping methods for adjoint sensitivity analysis",((PetscObject)ts)->type_name);
  CHKERRQ(PetscLogEventBegin(TS_AdjointStep,ts,0,0,0));
  CHKERRQ((*ts->ops->adjointstep)(ts));
  CHKERRQ(PetscLogEventEnd(TS_AdjointStep,ts,0,0,0));
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    /* a negative reason means divergence; fatal only if the user requested errors on failed steps */
    PetscCheck(!ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSAdjointStep has failed due to %s",TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* stop once the requested number of backward steps has been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(0);
}
1464 
1465 /*@
1466    TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE
1467 
1468    Collective on TS
1469 
1470    Input Parameter:
1471 .  ts - the TS context obtained from TSCreate()
1472 
1473    Options Database:
1474 . -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values
1475 
1476    Level: intermediate
1477 
1478    Notes:
1479    This must be called after a call to TSSolve() that solves the forward problem
1480 
1481    By default this will integrate back to the initial time, one can use TSAdjointSetSteps() to step back to a later time
1482 
1483 .seealso: TSCreate(), TSSetCostGradients(), TSSetSolution(), TSAdjointStep()
1484 @*/
1485 PetscErrorCode TSAdjointSolve(TS ts)
1486 {
1487   static PetscBool cite = PETSC_FALSE;
1488 #if defined(TSADJOINT_STAGE)
1489   PetscLogStage  adjoint_stage;
1490 #endif
1491   PetscErrorCode ierr;
1492 
1493   PetscFunctionBegin;
1494   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1495   ierr = PetscCitationsRegister("@article{tsadjointpaper,\n"
1496                                 "  title         = {{PETSc TSAdjoint: a discrete adjoint ODE solver for first-order and second-order sensitivity analysis}},\n"
1497                                 "  author        = {Zhang, Hong and Constantinescu, Emil M.  and Smith, Barry F.},\n"
1498                                 "  journal       = {arXiv e-preprints},\n"
1499                                 "  eprint        = {1912.07696},\n"
1500                                 "  archivePrefix = {arXiv},\n"
1501                                 "  year          = {2019}\n}\n",&cite);CHKERRQ(ierr);
1502 #if defined(TSADJOINT_STAGE)
1503   CHKERRQ(PetscLogStageRegister("TSAdjoint",&adjoint_stage));
1504   CHKERRQ(PetscLogStagePush(adjoint_stage));
1505 #endif
1506   CHKERRQ(TSAdjointSetUp(ts));
1507 
1508   /* reset time step and iteration counters */
1509   ts->adjoint_steps     = 0;
1510   ts->ksp_its           = 0;
1511   ts->snes_its          = 0;
1512   ts->num_snes_failures = 0;
1513   ts->reject            = 0;
1514   ts->reason            = TS_CONVERGED_ITERATING;
1515 
1516   if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
1517   if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
1518 
1519   while (!ts->reason) {
1520     CHKERRQ(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime));
1521     CHKERRQ(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip));
1522     CHKERRQ(TSAdjointEventHandler(ts));
1523     CHKERRQ(TSAdjointStep(ts));
1524     if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) {
1525       CHKERRQ(TSAdjointCostIntegral(ts));
1526     }
1527   }
1528   if (!ts->steps) {
1529     CHKERRQ(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime));
1530     CHKERRQ(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip));
1531   }
1532   ts->solvetime = ts->ptime;
1533   CHKERRQ(TSTrajectoryViewFromOptions(ts->trajectory,NULL,"-ts_trajectory_view"));
1534   CHKERRQ(VecViewFromOptions(ts->vecs_sensi[0],(PetscObject) ts, "-ts_adjoint_view_solution"));
1535   ts->adjoint_max_steps = 0;
1536 #if defined(TSADJOINT_STAGE)
1537   CHKERRQ(PetscLogStagePop());
1538 #endif
1539   PetscFunctionReturn(0);
1540 }
1541 
1542 /*@C
1543    TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using TSAdjointMonitorSet()
1544 
1545    Collective on TS
1546 
1547    Input Parameters:
1548 +  ts - time stepping context obtained from TSCreate()
1549 .  step - step number that has just completed
1550 .  ptime - model time of the state
1551 .  u - state at the current model time
1552 .  numcost - number of cost functions (dimension of lambda  or mu)
1553 .  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
1554 -  mu - vectors containing the gradients of the cost functions with respect to the problem parameters
1555 
1556    Notes:
1557    TSAdjointMonitor() is typically used automatically within the time stepping implementations.
1558    Users would almost never call this routine directly.
1559 
1560    Level: developer
1561 
1562 @*/
1563 PetscErrorCode TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda, Vec *mu)
1564 {
1565   PetscInt       i,n = ts->numberadjointmonitors;
1566 
1567   PetscFunctionBegin;
1568   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1569   PetscValidHeaderSpecific(u,VEC_CLASSID,4);
1570   CHKERRQ(VecLockReadPush(u));
1571   for (i=0; i<n; i++) {
1572     CHKERRQ((*ts->adjointmonitor[i])(ts,step,ptime,u,numcost,lambda,mu,ts->adjointmonitorcontext[i]));
1573   }
1574   CHKERRQ(VecLockReadPop(u));
1575   PetscFunctionReturn(0);
1576 }
1577 
1578 /*@
1579  TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.
1580 
1581  Collective on TS
1582 
1583  Input Parameter:
1584  .  ts - time stepping context
1585 
1586  Level: advanced
1587 
1588  Notes:
1589  This function cannot be called until TSAdjointStep() has been completed.
1590 
1591  .seealso: TSAdjointSolve(), TSAdjointStep
1592  @*/
1593 PetscErrorCode TSAdjointCostIntegral(TS ts)
1594 {
1595   PetscFunctionBegin;
1596   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1597   PetscCheck(ts->ops->adjointintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the adjoint run",((PetscObject)ts)->type_name);
1598   CHKERRQ((*ts->ops->adjointintegral)(ts));
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 /* ------------------ Forward (tangent linear) sensitivity  ------------------*/
1603 
1604 /*@
1605   TSForwardSetUp - Sets up the internal data structures for the later use
1606   of forward sensitivity analysis
1607 
1608   Collective on TS
1609 
1610   Input Parameter:
1611 . ts - the TS context obtained from TSCreate()
1612 
1613   Level: advanced
1614 
1615 .seealso: TSCreate(), TSDestroy(), TSSetUp()
1616 @*/
1617 PetscErrorCode TSForwardSetUp(TS ts)
1618 {
1619   PetscFunctionBegin;
1620   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1621   if (ts->forwardsetupcalled) PetscFunctionReturn(0);
1622   if (ts->ops->forwardsetup) {
1623     CHKERRQ((*ts->ops->forwardsetup)(ts));
1624   }
1625   CHKERRQ(VecDuplicate(ts->vec_sol,&ts->vec_sensip_col));
1626   ts->forwardsetupcalled = PETSC_TRUE;
1627   PetscFunctionReturn(0);
1628 }
1629 
1630 /*@
1631   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis
1632 
1633   Collective on TS
1634 
1635   Input Parameter:
1636 . ts - the TS context obtained from TSCreate()
1637 
1638   Level: advanced
1639 
1640 .seealso: TSCreate(), TSDestroy(), TSForwardSetUp()
1641 @*/
1642 PetscErrorCode TSForwardReset(TS ts)
1643 {
1644   TS             quadts = ts->quadraturets;
1645 
1646   PetscFunctionBegin;
1647   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1648   if (ts->ops->forwardreset) {
1649     CHKERRQ((*ts->ops->forwardreset)(ts));
1650   }
1651   CHKERRQ(MatDestroy(&ts->mat_sensip));
1652   if (quadts) {
1653     CHKERRQ(MatDestroy(&quadts->mat_sensip));
1654   }
1655   CHKERRQ(VecDestroy(&ts->vec_sensip_col));
1656   ts->forward_solve      = PETSC_FALSE;
1657   ts->forwardsetupcalled = PETSC_FALSE;
1658   PetscFunctionReturn(0);
1659 }
1660 
1661 /*@
1662   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.
1663 
1664   Input Parameters:
1665 + ts - the TS context obtained from TSCreate()
1666 . numfwdint - number of integrals
1667 - vp - the vectors containing the gradients for each integral w.r.t. parameters
1668 
1669   Level: deprecated
1670 
1671 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep()
1672 @*/
1673 PetscErrorCode TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec *vp)
1674 {
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1677   PetscCheck(!ts->numcost || ts->numcost == numfwdint,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
1678   if (!ts->numcost) ts->numcost = numfwdint;
1679 
1680   ts->vecs_integral_sensip = vp;
1681   PetscFunctionReturn(0);
1682 }
1683 
1684 /*@
1685   TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term.
1686 
1687   Input Parameter:
1688 . ts - the TS context obtained from TSCreate()
1689 
1690   Output Parameter:
1691 . vp - the vectors containing the gradients for each integral w.r.t. parameters
1692 
1693   Level: deprecated
1694 
1695 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep()
1696 @*/
1697 PetscErrorCode TSForwardGetIntegralGradients(TS ts,PetscInt *numfwdint,Vec **vp)
1698 {
1699   PetscFunctionBegin;
1700   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1701   PetscValidPointer(vp,3);
1702   if (numfwdint) *numfwdint = ts->numcost;
1703   if (vp) *vp = ts->vecs_integral_sensip;
1704   PetscFunctionReturn(0);
1705 }
1706 
1707 /*@
1708   TSForwardStep - Compute the forward sensitivity for one time step.
1709 
1710   Collective on TS
1711 
1712   Input Parameter:
1713 . ts - time stepping context
1714 
1715   Level: advanced
1716 
1717   Notes:
1718   This function cannot be called until TSStep() has been completed.
1719 
1720 .seealso: TSForwardSetSensitivities(), TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardSetUp()
1721 @*/
1722 PetscErrorCode TSForwardStep(TS ts)
1723 {
1724   PetscFunctionBegin;
1725   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1726   PetscCheck(ts->ops->forwardstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide forward sensitivity analysis",((PetscObject)ts)->type_name);
1727   CHKERRQ(PetscLogEventBegin(TS_ForwardStep,ts,0,0,0));
1728   CHKERRQ((*ts->ops->forwardstep)(ts));
1729   CHKERRQ(PetscLogEventEnd(TS_ForwardStep,ts,0,0,0));
1730   PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSFowardStep has failed due to %s",TSConvergedReasons[ts->reason]);
1731   PetscFunctionReturn(0);
1732 }
1733 
1734 /*@
1735   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution  w.r.t. the problem parameters and initial values.
1736 
1737   Logically Collective on TS
1738 
1739   Input Parameters:
1740 + ts - the TS context obtained from TSCreate()
1741 . nump - number of parameters
1742 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1743 
1744   Level: beginner
1745 
1746   Notes:
1747   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
1748   This function turns on a flag to trigger TSSolve() to compute forward sensitivities automatically.
1749   You must call this function before TSSolve().
1750   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.
1751 
1752 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep()
1753 @*/
1754 PetscErrorCode TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat)
1755 {
1756   PetscFunctionBegin;
1757   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1758   PetscValidHeaderSpecific(Smat,MAT_CLASSID,3);
1759   ts->forward_solve  = PETSC_TRUE;
1760   if (nump == PETSC_DEFAULT) {
1761     CHKERRQ(MatGetSize(Smat,NULL,&ts->num_parameters));
1762   } else ts->num_parameters = nump;
1763   CHKERRQ(PetscObjectReference((PetscObject)Smat));
1764   CHKERRQ(MatDestroy(&ts->mat_sensip));
1765   ts->mat_sensip = Smat;
1766   PetscFunctionReturn(0);
1767 }
1768 
1769 /*@
1770   TSForwardGetSensitivities - Returns the trajectory sensitivities
1771 
1772   Not Collective, but Vec returned is parallel if TS is parallel
1773 
1774   Output Parameters:
1775 + ts - the TS context obtained from TSCreate()
1776 . nump - number of parameters
1777 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
1778 
1779   Level: intermediate
1780 
1781 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep()
1782 @*/
1783 PetscErrorCode TSForwardGetSensitivities(TS ts,PetscInt *nump,Mat *Smat)
1784 {
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1787   if (nump) *nump = ts->num_parameters;
1788   if (Smat) *Smat = ts->mat_sensip;
1789   PetscFunctionReturn(0);
1790 }
1791 
1792 /*@
1793    TSForwardCostIntegral - Evaluate the cost integral in the forward run.
1794 
1795    Collective on TS
1796 
1797    Input Parameter:
1798 .  ts - time stepping context
1799 
1800    Level: advanced
1801 
1802    Notes:
1803    This function cannot be called until TSStep() has been completed.
1804 
1805 .seealso: TSSolve(), TSAdjointCostIntegral()
1806 @*/
1807 PetscErrorCode TSForwardCostIntegral(TS ts)
1808 {
1809   PetscFunctionBegin;
1810   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1811   PetscCheck(ts->ops->forwardintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the forward run",((PetscObject)ts)->type_name);
1812   CHKERRQ((*ts->ops->forwardintegral)(ts));
1813   PetscFunctionReturn(0);
1814 }
1815 
1816 /*@
1817   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities
1818 
1819   Collective on TS
1820 
1821   Input Parameters:
1822 + ts - the TS context obtained from TSCreate()
1823 - didp - parametric sensitivities of the initial condition
1824 
1825   Level: intermediate
1826 
1827   Notes: TSSolve() allows users to pass the initial solution directly to TS. But the tangent linear variables cannot be initialized in this way. This function is used to set initial values for tangent linear variables.
1828 
1829 .seealso: TSForwardSetSensitivities()
1830 @*/
1831 PetscErrorCode TSForwardSetInitialSensitivities(TS ts,Mat didp)
1832 {
1833   PetscFunctionBegin;
1834   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1835   PetscValidHeaderSpecific(didp,MAT_CLASSID,2);
1836   if (!ts->mat_sensip) {
1837     CHKERRQ(TSForwardSetSensitivities(ts,PETSC_DEFAULT,didp));
1838   }
1839   PetscFunctionReturn(0);
1840 }
1841 
1842 /*@
1843    TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages
1844 
1845    Input Parameter:
1846 .  ts - the TS context obtained from TSCreate()
1847 
1848    Output Parameters:
1849 +  ns - number of stages
1850 -  S - tangent linear sensitivities at the intermediate stages
1851 
1852    Level: advanced
1853 
1854 @*/
1855 PetscErrorCode TSForwardGetStages(TS ts,PetscInt *ns,Mat **S)
1856 {
1857   PetscFunctionBegin;
1858   PetscValidHeaderSpecific(ts, TS_CLASSID,1);
1859 
1860   if (!ts->ops->getstages) *S=NULL;
1861   else {
1862     CHKERRQ((*ts->ops->forwardgetstages)(ts,ns,S));
1863   }
1864   PetscFunctionReturn(0);
1865 }
1866 
1867 /*@
1868    TSCreateQuadratureTS - Create a sub-TS that evaluates integrals over time
1869 
1870    Input Parameters:
1871 +  ts - the TS context obtained from TSCreate()
1872 -  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1873 
1874    Output Parameters:
1875 .  quadts - the child TS context
1876 
1877    Level: intermediate
1878 
1879 .seealso: TSGetQuadratureTS()
1880 @*/
1881 PetscErrorCode TSCreateQuadratureTS(TS ts,PetscBool fwd,TS *quadts)
1882 {
1883   char prefix[128];
1884 
1885   PetscFunctionBegin;
1886   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1887   PetscValidPointer(quadts,3);
1888   CHKERRQ(TSDestroy(&ts->quadraturets));
1889   CHKERRQ(TSCreate(PetscObjectComm((PetscObject)ts),&ts->quadraturets));
1890   CHKERRQ(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets,(PetscObject)ts,1));
1891   CHKERRQ(PetscLogObjectParent((PetscObject)ts,(PetscObject)ts->quadraturets));
1892   CHKERRQ(PetscSNPrintf(prefix,sizeof(prefix),"%squad_",((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : ""));
1893   CHKERRQ(TSSetOptionsPrefix(ts->quadraturets,prefix));
1894   *quadts = ts->quadraturets;
1895 
1896   if (ts->numcost) {
1897     CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,ts->numcost,&(*quadts)->vec_sol));
1898   } else {
1899     CHKERRQ(VecCreateSeq(PETSC_COMM_SELF,1,&(*quadts)->vec_sol));
1900   }
1901   ts->costintegralfwd = fwd;
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 /*@
1906    TSGetQuadratureTS - Return the sub-TS that evaluates integrals over time
1907 
1908    Input Parameter:
1909 .  ts - the TS context obtained from TSCreate()
1910 
1911    Output Parameters:
1912 +  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
1913 -  quadts - the child TS context
1914 
1915    Level: intermediate
1916 
1917 .seealso: TSCreateQuadratureTS()
1918 @*/
1919 PetscErrorCode TSGetQuadratureTS(TS ts,PetscBool *fwd,TS *quadts)
1920 {
1921   PetscFunctionBegin;
1922   PetscValidHeaderSpecific(ts,TS_CLASSID,1);
1923   if (fwd) *fwd = ts->costintegralfwd;
1924   if (quadts) *quadts = ts->quadraturets;
1925   PetscFunctionReturn(0);
1926 }
1927 
/*@
   TSComputeSNESJacobian - Compute the SNESJacobian

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
-  x - state vector

   Output Parameters:
+  J - Jacobian matrix
-  Jpre - preconditioning matrix for J (may be same as J)

   Level: developer

   Notes:
   Using SNES to compute the Jacobian enables finite differencing when TS Jacobian is not available.
@*/
PetscErrorCode TSComputeSNESJacobian(TS ts,Vec x,Mat J,Mat Jpre)
{
  SNES           snes = ts->snes;
  PetscErrorCode (*jac)(SNES,Vec,Mat,Mat,void*) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  CHKERRQ(SNESGetJacobian(snes,NULL,NULL,&jac,NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* FD coloring needs the residual at the base point x before it can difference around it */
    CHKERRQ(SNESSetSolution(snes,x));
    CHKERRQ(SNESGetFunction(snes,&f,NULL,NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    CHKERRQ(SNESComputeFunction(snes,x,f));
  }
  CHKERRQ(SNESComputeJacobian(snes,x,J,Jpre));
  PetscFunctionReturn(0);
}
1967