#include <petsc/private/tsimpl.h> /*I "petscts.h" I*/
#include <petscdraw.h>

PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

/* #define TSADJOINT_STAGE */

/* ------------------------ Sensitivity Context ---------------------------*/

/*@C
  TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

  Logically Collective on ts

  Input Parameters:
+ ts - `TS` context obtained from `TSCreate()`
. Amat - JacobianP matrix
. func - function that computes the JacobianP
- ctx - [optional] user-defined function context

  Calling sequence of func:
$ func (TS ts,PetscReal t,Vec U,Mat A,void *ctx);
+ t - current timestep
. U - input vector (current ODE solution)
. A - output matrix
- ctx - [optional] user-defined function context

  Level: intermediate

  Note:
  Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p

.seealso: [](chapter_ts), `TS`, `TSGetRHSJacobianP()`
@*/
PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* reference Amat before destroying the old matrix so resetting with the same matrix is safe */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacprhs));
    ts->Jacprhs = Amat;
  }
  PetscFunctionReturn(0);
}

/*@C
  TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

  Not Collective

  Input Parameter:
. ts - `TS` context obtained from `TSCreate()`

  Output Parameters:
+ Amat - JacobianP matrix
. 
func - function 62 - ctx - [optional] user-defined function context 63 64 Calling sequence of func: 65 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 66 + t - current timestep 67 . U - input vector (current ODE solution) 68 . A - output matrix 69 - ctx - [optional] user-defined function context 70 71 Level: intermediate 72 73 Note: 74 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 75 76 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`, `TSGetRHSJacobianP()` 77 @*/ 78 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS, PetscReal, Vec, Mat, void *), void **ctx) 79 { 80 PetscFunctionBegin; 81 if (func) *func = ts->rhsjacobianp; 82 if (ctx) *ctx = ts->rhsjacobianpctx; 83 if (Amat) *Amat = ts->Jacprhs; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 89 90 Collective on ts 91 92 Input Parameters: 93 . ts - The `TS` context obtained from `TSCreate()` 94 95 Level: developer 96 97 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS` 98 @*/ 99 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat) 100 { 101 PetscFunctionBegin; 102 if (!Amat) PetscFunctionReturn(0); 103 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 104 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 105 106 PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 107 PetscFunctionReturn(0); 108 } 109 110 /*@C 111 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 112 113 Logically Collective on ts 114 115 Input Parameters: 116 + ts - `TS` context obtained from `TSCreate()` 117 . Amat - JacobianP matrix 118 . 
func - function
- ctx - [optional] user-defined function context

  Calling sequence of func:
$ func (TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat A,void *ctx);
+ t - current timestep
. U - input vector (current ODE solution)
. Udot - time derivative of state vector
. shift - shift to apply, see note below
. A - output matrix
- ctx - [optional] user-defined function context

  Level: intermediate

  Note:
  Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p

.seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`
@*/
PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Vec, PetscReal, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  ts->ijacobianp    = func;
  ts->ijacobianpctx = ctx;
  if (Amat) {
    /* reference Amat before destroying the old matrix so resetting with the same matrix is safe */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(0);
}

/*@C
  TSComputeIJacobianP - Runs the user-defined IJacobianP function.

  Collective on ts

  Input Parameters:
+ ts - the `TS` context
. t - current timestep
. U - state vector
. Udot - time derivative of state vector
. shift - shift to apply, see note below
- imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate

  Output Parameters:
. 
A - Jacobian matrix

  Level: developer

.seealso: [](chapter_ts), `TS`, `TSSetIJacobianP()`
@*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  if (!Amat) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* fully implicit evaluation: fold the RHS JacobianP into Amat with a minus sign,
       since the RHS sits on the other side of F(Udot,U,t) = G(U,P,t) */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(0);
}

/*@C
  TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

  Logically Collective on ts

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
. numcost - number of gradients to be computed, this is the number of cost functions
. 
costintegral - vector that stores the integral values
. rf - routine for evaluating the integrand function
. drduf - function that computes the gradients of the r's with respect to u
. drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL)
. fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
- ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL)

  Calling sequence of rf:
$ PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx);

  Calling sequence of drduf:
$ PetscErrorCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx);

  Calling sequence of drdpf:
$ PetscErrorCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx);

  Level: deprecated

  Note:
  For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

.seealso: [](chapter_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* reference before destroying any previously-set vector so resetting with the same vector is safe */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(0);
}

/*@C
  TSGetCostIntegral - Returns the values of the integral term in the cost functions.
  It is valid to call the routine after a backward run.

  Not Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameter:
. v - the vector containing the integrals for each cost function

  Level: intermediate

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
@*/
PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
{
  TS quadts;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(v, 2);
  PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
  *v = quadts->vec_sol;
  PetscFunctionReturn(0);
}

/*@C
  TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

  Input Parameters:
+ ts - the `TS` context
. t - current time
- U - state vector, i.e. current solution

  Output Parameter:
. 
Q - vector of size numcost to hold the outputs

  Level: deprecated

  Note:
  Most users should not need to explicitly call this routine, as it
  is used internally within the sensitivity analysis context.

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
@*/
PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Q, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
  if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
  else PetscCall(VecZeroEntries(Q)); /* no integrand set: the integral contribution is zero */
  PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
  PetscFunctionReturn(0);
}

/*@C
  TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

  Level: deprecated

@*/
PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;
  if (!DRDU) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(0);
}

/*@C
  TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

  Level: deprecated

@*/
PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;
  if (!DRDP) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(0);
}

/*@C
  TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

  Logically Collective on ts

  Input Parameters:
+ ts - `TS` context obtained from `TSCreate()`
. ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
. hessianproductfunc1 - vector-Hessian-vector product function for F_UU
. ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
. hessianproductfunc2 - vector-Hessian-vector product function for F_UP
. ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
. hessianproductfunc3 - vector-Hessian-vector product function for F_PU
. ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
- hessianproductfunc4 - vector-Hessian-vector product function for F_PP

  Calling sequence of ihessianproductfunc:
$ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
+ t - current timestep
. U - input vector (current ODE solution)
. Vl - an array of input vectors to be left-multiplied with the Hessian
. Vr - input vector to be right-multiplied with the Hessian
. VHV - an array of output vectors for vector-Hessian-vector product
- ctx - [optional] user-defined function context

  Level: intermediate

  Notes:
  The first Hessian function and the working array are required.
  As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
$ Vl_n^T*F_UP*Vr
  where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
  Each entry of F_UP corresponds to the derivative
$ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
  The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
$ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
  If the cost function is a scalar, there will be only one vector in Vl and VHV.

.seealso: [](chapter_ts), `TS`
@*/
PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(ihp1, 2); /* the F_UU work array is required; the others are optional */

  ts->ihessianproductctx = ctx;
  if (ihp1) ts->vecs_fuu = ihp1;
  if (ihp2) ts->vecs_fup = ihp2;
  if (ihp3) ts->vecs_fpu = ihp3;
  if (ihp4) ts->vecs_fpp = ihp4;
  ts->ihessianproduct_fuu = ihessianproductfunc1;
  ts->ihessianproduct_fup = ihessianproductfunc2;
  ts->ihessianproduct_fpu = ihessianproductfunc3;
  ts->ihessianproduct_fpp = ihessianproductfunc4;
  PetscFunctionReturn(0);
}

/*@C
  TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_guu) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
    /* RHS terms enter F(Udot,U,t) = G(U,P,t) with the opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(0);
}

/*@C
  TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_gup) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
    /* RHS terms enter F(Udot,U,t) = G(U,P,t) with the opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(0);
}

/*@C
  TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_gpu) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
    /* RHS terms enter F(Udot,U,t) = G(U,P,t) with the opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(0);
}

/*@C
  TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
540 541 .seealso: [](chapter_ts), `TSSetIHessianProduct()` 542 @*/ 543 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 544 { 545 PetscFunctionBegin; 546 if (!VHV) PetscFunctionReturn(0); 547 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 548 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 549 550 if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 551 552 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 553 if (ts->rhshessianproduct_gpp) { 554 PetscInt nadj; 555 PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV)); 556 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 557 } 558 PetscFunctionReturn(0); 559 } 560 561 /*@C 562 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 563 564 Logically Collective on ts 565 566 Input Parameters: 567 + ts - `TS` context obtained from `TSCreate()` 568 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 569 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 570 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 571 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 572 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 573 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 574 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 575 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP 576 577 Calling sequence of ihessianproductfunc: 578 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 579 + t - current timestep 580 . U - input vector (current ODE solution) 581 . Vl - an array of input vectors to be left-multiplied with the Hessian 582 . Vr - input vector to be right-multiplied with the Hessian 583 . VHV - an array of output vectors for vector-Hessian-vector product 584 - ctx - [optional] user-defined function context 585 586 Level: intermediate 587 588 Notes: 589 The first Hessian function and the working array are required. 590 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 591 $ Vl_n^T*G_UP*Vr 592 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 593 Each entry of G_UP corresponds to the derivative 594 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 595 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 596 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 597 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
.seealso: `TS`
@*/
PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(rhshp1, 2); /* the G_UU work array is required; the others are optional */

  ts->rhshessianproductctx = ctx;
  if (rhshp1) ts->vecs_guu = rhshp1;
  if (rhshp2) ts->vecs_gup = rhshp2;
  if (rhshp3) ts->vecs_gpu = rhshp3;
  if (rhshp4) ts->vecs_gpp = rhshp4;
  ts->rhshessianproduct_guu = rhshessianproductfunc1;
  ts->rhshessianproduct_gup = rhshessianproductfunc2;
  ts->rhshessianproduct_gpu = rhshessianproductfunc3;
  ts->rhshessianproduct_gpp = rhshessianproductfunc4;
  PetscFunctionReturn(0);
}

/*@C
  TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
.seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(0);
}

/*@C
  TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(0);
}

/*@C
  TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

  Collective on ts

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
686 687 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()` 688 @*/ 689 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 690 { 691 PetscFunctionBegin; 692 if (!VHV) PetscFunctionReturn(0); 693 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 694 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 695 696 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 697 PetscFunctionReturn(0); 698 } 699 700 /*@C 701 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 702 703 Collective on ts 704 705 Input Parameters: 706 . ts - The `TS` context obtained from `TSCreate()` 707 708 Level: developer 709 710 Note: 711 `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation, 712 so most users would not generally call this routine themselves. 713 714 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()` 715 @*/ 716 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 717 { 718 PetscFunctionBegin; 719 if (!VHV) PetscFunctionReturn(0); 720 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 721 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 722 723 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 724 PetscFunctionReturn(0); 725 } 726 727 /* --------------------------- Adjoint sensitivity ---------------------------*/ 728 729 /*@ 730 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 731 for use by the `TS` adjoint routines. 732 733 Logically Collective on ts 734 735 Input Parameters: 736 + ts - the `TS` context obtained from `TSCreate()` 737 . 
numcost - number of gradients to be computed, this is the number of cost functions 738 . lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 739 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 740 741 Level: beginner 742 743 Notes: 744 the entries in these vectors must be correctly initialized with the values lamda_i = df/dy|finaltime mu_i = df/dp|finaltime 745 746 After `TSAdjointSolve()` is called the lamba and the mu contain the computed sensitivities 747 748 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()` 749 @*/ 750 PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu) 751 { 752 PetscFunctionBegin; 753 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 754 PetscValidPointer(lambda, 3); 755 ts->vecs_sensi = lambda; 756 ts->vecs_sensip = mu; 757 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 758 ts->numcost = numcost; 759 PetscFunctionReturn(0); 760 } 761 762 /*@ 763 TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()` 764 765 Not Collective, but the vectors returned are parallel if `TS` is parallel 766 767 Input Parameter: 768 . ts - the `TS` context obtained from `TSCreate()` 769 770 Output Parameters: 771 + numcost - size of returned arrays 772 . 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
- mu - vectors containing the gradients of the cost functions with respect to the problem parameters

  Level: intermediate

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (numcost) *numcost = ts->numcost;
  if (lambda) *lambda = ts->vecs_sensi;
  if (mu) *mu = ts->vecs_sensip;
  PetscFunctionReturn(0);
}

/*@
  TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
  for use by the `TS` adjoint routines.

  Logically Collective on ts

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
. numcost - number of cost functions
. lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
. mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
- dir - the direction vector that are multiplied with the Hessian of the cost functions

  Level: beginner

  Notes:
  Hessian of the cost function is completely different from Hessian of the ODE/DAE system

  For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

  After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
810 811 Passing NULL for lambda2 disables the second-order calculation. 812 813 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()` 814 @*/ 815 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir) 816 { 817 PetscFunctionBegin; 818 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 819 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 820 ts->numcost = numcost; 821 ts->vecs_sensi2 = lambda2; 822 ts->vecs_sensi2p = mu2; 823 ts->vec_dir = dir; 824 PetscFunctionReturn(0); 825 } 826 827 /*@ 828 TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()` 829 830 Not Collective, but vectors returned are parallel if `TS` is parallel 831 832 Input Parameter: 833 . ts - the `TS` context obtained from `TSCreate()` 834 835 Output Parameters: 836 + numcost - number of cost functions 837 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 838 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 839 - dir - the direction vector that are multiplied with the Hessian of the cost functions 840 841 Level: intermediate 842 843 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()` 844 @*/ 845 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir) 846 { 847 PetscFunctionBegin; 848 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 849 if (numcost) *numcost = ts->numcost; 850 if (lambda2) *lambda2 = ts->vecs_sensi2; 851 if (mu2) *mu2 = ts->vecs_sensi2p; 852 if (dir) *dir = ts->vec_dir; 853 PetscFunctionReturn(0); 854 } 855 856 /*@ 857 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 858 859 Logically Collective on ts 860 861 Input Parameters: 862 + ts - the `TS` context obtained from `TSCreate()` 863 - didp - the derivative of initial values w.r.t. parameters 864 865 Level: intermediate 866 867 Notes: 868 When computing sensitivies w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. 869 `TS` adjoint does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver. 870 871 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 872 @*/ 873 PetscErrorCode TSAdjointSetForward(TS ts, Mat didp) 874 { 875 Mat A; 876 Vec sp; 877 PetscScalar *xarr; 878 PetscInt lsize; 879 880 PetscFunctionBegin; 881 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 882 PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first"); 883 PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 884 /* create a single-column dense matrix */ 885 PetscCall(VecGetLocalSize(ts->vec_sol, &lsize)); 886 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A)); 887 888 PetscCall(VecDuplicate(ts->vec_sol, &sp)); 889 PetscCall(MatDenseGetColumn(A, 0, &xarr)); 890 PetscCall(VecPlaceArray(sp, xarr)); 891 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 892 if (didp) { 893 PetscCall(MatMult(didp, ts->vec_dir, sp)); 894 PetscCall(VecScale(sp, 2.)); 895 } else { 896 PetscCall(VecZeroEntries(sp)); 897 } 898 } else { /* tangent linear variable initialized as dir */ 899 PetscCall(VecCopy(ts->vec_dir, sp)); 900 } 901 PetscCall(VecResetArray(sp)); 902 PetscCall(MatDenseRestoreColumn(A, &xarr)); 903 PetscCall(VecDestroy(&sp)); 904 905 PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */ 906 907 PetscCall(MatDestroy(&A)); 908 PetscFunctionReturn(0); 909 } 910 911 /*@ 912 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 913 914 Logically Collective on ts 915 916 Input Parameters: 917 . ts - the `TS` context obtained from `TSCreate()` 918 919 Level: intermediate 920 921 .seealso: [](chapter_ts), `TSAdjointSetForward()` 922 @*/ 923 PetscErrorCode TSAdjointResetForward(TS ts) 924 { 925 PetscFunctionBegin; 926 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 927 PetscCall(TSForwardReset(ts)); 928 PetscFunctionReturn(0); 929 } 930 931 /*@ 932 TSAdjointSetUp - Sets up the internal data structures for the later use 933 of an adjoint solver 934 935 Collective on ts 936 937 Input Parameter: 938 . 
ts - the `TS` context obtained from `TSCreate()` 939 940 Level: advanced 941 942 .seealso: [](chapter_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 943 @*/ 944 PetscErrorCode TSAdjointSetUp(TS ts) 945 { 946 TSTrajectory tj; 947 PetscBool match; 948 949 PetscFunctionBegin; 950 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 951 if (ts->adjointsetupcalled) PetscFunctionReturn(0); 952 PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first"); 953 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 954 PetscCall(TSGetTrajectory(ts, &tj)); 955 PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match)); 956 if (match) { 957 PetscBool solution_only; 958 PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only)); 959 PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 960 } 961 PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */ 962 963 if (ts->quadraturets) { /* if there is integral in the cost function */ 964 PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col)); 965 if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col)); 966 } 967 968 PetscTryTypeMethod(ts, adjointsetup); 969 ts->adjointsetupcalled = PETSC_TRUE; 970 PetscFunctionReturn(0); 971 } 972 973 /*@ 974 TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s. 975 976 Collective on ts 977 978 Input Parameter: 979 . 
ts - the `TS` context obtained from `TSCreate()` 980 981 Level: beginner 982 983 .seealso: [](chapter_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 984 @*/ 985 PetscErrorCode TSAdjointReset(TS ts) 986 { 987 PetscFunctionBegin; 988 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 989 PetscTryTypeMethod(ts, adjointreset); 990 if (ts->quadraturets) { /* if there is integral in the cost function */ 991 PetscCall(VecDestroy(&ts->vec_drdu_col)); 992 if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col)); 993 } 994 ts->vecs_sensi = NULL; 995 ts->vecs_sensip = NULL; 996 ts->vecs_sensi2 = NULL; 997 ts->vecs_sensi2p = NULL; 998 ts->vec_dir = NULL; 999 ts->adjointsetupcalled = PETSC_FALSE; 1000 PetscFunctionReturn(0); 1001 } 1002 1003 /*@ 1004 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1005 1006 Logically Collective on ts 1007 1008 Input Parameters: 1009 + ts - the `TS` context obtained from `TSCreate()` 1010 - steps - number of steps to use 1011 1012 Level: intermediate 1013 1014 Notes: 1015 Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. 
One can call this 1016 so as to integrate back to less than the original timestep 1017 1018 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()` 1019 @*/ 1020 PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps) 1021 { 1022 PetscFunctionBegin; 1023 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1024 PetscValidLogicalCollectiveInt(ts, steps, 2); 1025 PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps"); 1026 PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps"); 1027 ts->adjoint_max_steps = steps; 1028 PetscFunctionReturn(0); 1029 } 1030 1031 /*@C 1032 TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()` 1033 1034 Level: deprecated 1035 1036 @*/ 1037 PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx) 1038 { 1039 PetscFunctionBegin; 1040 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1041 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1042 1043 ts->rhsjacobianp = func; 1044 ts->rhsjacobianpctx = ctx; 1045 if (Amat) { 1046 PetscCall(PetscObjectReference((PetscObject)Amat)); 1047 PetscCall(MatDestroy(&ts->Jacp)); 1048 ts->Jacp = Amat; 1049 } 1050 PetscFunctionReturn(0); 1051 } 1052 1053 /*@C 1054 TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()` 1055 1056 Level: deprecated 1057 1058 @*/ 1059 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat) 1060 { 1061 PetscFunctionBegin; 1062 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1063 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1064 PetscValidPointer(Amat, 4); 1065 1066 PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 1067 PetscFunctionReturn(0); 1068 } 1069 1070 /*@ 1071 TSAdjointComputeDRDYFunction - Deprecated, use 
`TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 1072 1073 Level: deprecated 1074 1075 @*/ 1076 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 1077 { 1078 PetscFunctionBegin; 1079 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1080 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1081 1082 PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 1083 PetscFunctionReturn(0); 1084 } 1085 1086 /*@ 1087 TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 1088 1089 Level: deprecated 1090 1091 @*/ 1092 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 1093 { 1094 PetscFunctionBegin; 1095 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1096 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1097 1098 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 1099 PetscFunctionReturn(0); 1100 } 1101 1102 /*@C 1103 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1104 1105 Level: intermediate 1106 1107 .seealso: [](chapter_ts), `TSAdjointMonitorSet()` 1108 @*/ 1109 PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1110 { 1111 PetscViewer viewer = vf->viewer; 1112 1113 PetscFunctionBegin; 1114 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1115 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1116 PetscCall(VecView(lambda[0], viewer)); 1117 PetscCall(PetscViewerPopFormat(viewer)); 1118 PetscFunctionReturn(0); 1119 } 1120 1121 /*@C 1122 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1123 1124 Collective on ts 1125 1126 Input Parameters: 1127 + ts - `TS` object you wish to monitor 1128 . name - the monitor type one is seeking 1129 . 
help - message indicating what monitoring is done 1130 . manual - manual page for the monitor 1131 . monitor - the monitor function 1132 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects 1133 1134 Level: developer 1135 1136 .seealso: [](chapter_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`, 1137 `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()` 1138 `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`, 1139 `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`, 1140 `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`, 1141 `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`, 1142 `PetscOptionsFList()`, `PetscOptionsEList()` 1143 @*/ 1144 PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *)) 1145 { 1146 PetscViewer viewer; 1147 PetscViewerFormat format; 1148 PetscBool flg; 1149 1150 PetscFunctionBegin; 1151 PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg)); 1152 if (flg) { 1153 PetscViewerAndFormat *vf; 1154 PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf)); 1155 PetscCall(PetscObjectDereference((PetscObject)viewer)); 1156 if (monitorsetup) PetscCall((*monitorsetup)(ts, vf)); 1157 PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy)); 1158 } 1159 PetscFunctionReturn(0); 1160 } 
1161 1162 /*@C 1163 TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every 1164 timestep to display the iteration's progress. 1165 1166 Logically Collective on ts 1167 1168 Input Parameters: 1169 + ts - the `TS` context obtained from `TSCreate()` 1170 . adjointmonitor - monitoring routine 1171 . adjointmctx - [optional] user-defined context for private data for the 1172 monitor routine (use NULL if no context is desired) 1173 - adjointmonitordestroy - [optional] routine that frees monitor context 1174 (may be NULL) 1175 1176 Calling sequence of monitor: 1177 $ int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx) 1178 1179 + ts - the `TS` context 1180 . steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have 1181 been interpolated to) 1182 . time - current time 1183 . u - current iterate 1184 . numcost - number of cost functionos 1185 . lambda - sensitivities to initial conditions 1186 . mu - sensitivities to parameters 1187 - adjointmctx - [optional] adjoint monitoring context 1188 1189 Level: intermediate 1190 1191 Note: 1192 This routine adds an additional monitor to the list of monitors that 1193 already has been loaded. 
1194 1195 Fortran Note: 1196 Only a single monitor function can be set for each TS object 1197 1198 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()` 1199 @*/ 1200 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **)) 1201 { 1202 PetscInt i; 1203 PetscBool identical; 1204 1205 PetscFunctionBegin; 1206 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1207 for (i = 0; i < ts->numbermonitors; i++) { 1208 PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical)); 1209 if (identical) PetscFunctionReturn(0); 1210 } 1211 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set"); 1212 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1213 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1214 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx; 1215 PetscFunctionReturn(0); 1216 } 1217 1218 /*@C 1219 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1220 1221 Logically Collective on ts 1222 1223 Input Parameters: 1224 . ts - the `TS` context obtained from `TSCreate()` 1225 1226 Notes: 1227 There is no way to remove a single, specific monitor. 
1228 1229 Level: intermediate 1230 1231 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1232 @*/ 1233 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1234 { 1235 PetscInt i; 1236 1237 PetscFunctionBegin; 1238 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1239 for (i = 0; i < ts->numberadjointmonitors; i++) { 1240 if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); 1241 } 1242 ts->numberadjointmonitors = 0; 1243 PetscFunctionReturn(0); 1244 } 1245 1246 /*@C 1247 TSAdjointMonitorDefault - the default monitor of adjoint computations 1248 1249 Level: intermediate 1250 1251 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1252 @*/ 1253 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1254 { 1255 PetscViewer viewer = vf->viewer; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1259 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1260 PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel)); 1261 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)ptime, ts->steprollback ? " (r)\n" : "\n")); 1262 PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel)); 1263 PetscCall(PetscViewerPopFormat(viewer)); 1264 PetscFunctionReturn(0); 1265 } 1266 1267 /*@C 1268 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling 1269 `VecView()` for the sensitivities to initial states at each timestep 1270 1271 Collective on ts 1272 1273 Input Parameters: 1274 + ts - the `TS` context 1275 . step - current time-step 1276 . ptime - current time 1277 . u - current state 1278 . numcost - number of cost functions 1279 . lambda - sensitivities to initial conditions 1280 . 
mu - sensitivities to parameters 1281 - dummy - either a viewer or NULL 1282 1283 Level: intermediate 1284 1285 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1286 @*/ 1287 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy) 1288 { 1289 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1290 PetscDraw draw; 1291 PetscReal xl, yl, xr, yr, h; 1292 char time[32]; 1293 1294 PetscFunctionBegin; 1295 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0); 1296 1297 PetscCall(VecView(lambda[0], ictx->viewer)); 1298 PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw)); 1299 PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime)); 1300 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 1301 h = yl + .95 * (yr - yl); 1302 PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time)); 1303 PetscCall(PetscDrawFlush(draw)); 1304 PetscFunctionReturn(0); 1305 } 1306 1307 /* 1308 TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from user options. 1309 1310 Collective on ts 1311 1312 Input Parameter: 1313 . ts - the `TS` context 1314 1315 Options Database Keys: 1316 + -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory) 1317 . 
-ts_adjoint_monitor - print information at each adjoint time step 1318 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1319 1320 Level: developer 1321 1322 Note: 1323 This is not normally called directly by users 1324 1325 .seealso: [](chapter_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1326 */ 1327 PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject) 1328 { 1329 PetscBool tflg, opt; 1330 1331 PetscFunctionBegin; 1332 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1333 PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options"); 1334 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1335 PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt)); 1336 if (opt) { 1337 PetscCall(TSSetSaveTrajectory(ts)); 1338 ts->adjoint_solve = tflg; 1339 } 1340 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL)); 1341 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL)); 1342 opt = PETSC_FALSE; 1343 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt)); 1344 if (opt) { 1345 TSMonitorDrawCtx ctx; 1346 PetscInt howoften = 1; 1347 1348 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL)); 1349 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx)); 1350 PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void 
**))TSMonitorDrawCtxDestroy)); 1351 } 1352 PetscFunctionReturn(0); 1353 } 1354 1355 /*@ 1356 TSAdjointStep - Steps one time step backward in the adjoint run 1357 1358 Collective on ts 1359 1360 Input Parameter: 1361 . ts - the `TS` context obtained from `TSCreate()` 1362 1363 Level: intermediate 1364 1365 .seealso: [](chapter_ts), `TSAdjointSetUp()`, `TSAdjointSolve()` 1366 @*/ 1367 PetscErrorCode TSAdjointStep(TS ts) 1368 { 1369 DM dm; 1370 1371 PetscFunctionBegin; 1372 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1373 PetscCall(TSGetDM(ts, &dm)); 1374 PetscCall(TSAdjointSetUp(ts)); 1375 ts->steps--; /* must decrease the step index before the adjoint step is taken. */ 1376 1377 ts->reason = TS_CONVERGED_ITERATING; 1378 ts->ptime_prev = ts->ptime; 1379 PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0)); 1380 PetscUseTypeMethod(ts, adjointstep); 1381 PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0)); 1382 ts->adjoint_steps++; 1383 1384 if (ts->reason < 0) { 1385 PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]); 1386 } else if (!ts->reason) { 1387 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1388 } 1389 PetscFunctionReturn(0); 1390 } 1391 1392 /*@ 1393 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1394 1395 Collective on ts 1396 ` 1397 Input Parameter: 1398 . ts - the `TS` context obtained from `TSCreate()` 1399 1400 Options Database Key: 1401 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1402 1403 Level: intermediate 1404 1405 Notes: 1406 This must be called after a call to `TSSolve()` that solves the forward problem 1407 1408 By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time 1409 1410 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1411 @*/ 1412 PetscErrorCode TSAdjointSolve(TS ts) 1413 { 1414 static PetscBool cite = PETSC_FALSE; 1415 #if defined(TSADJOINT_STAGE) 1416 PetscLogStage adjoint_stage; 1417 #endif 1418 1419 PetscFunctionBegin; 1420 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1421 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1422 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1423 " author = {Zhang, Hong and Constantinescu, Emil M. 
and Smith, Barry F.},\n" 1424 " journal = {SIAM Journal on Scientific Computing},\n" 1425 " volume = {44},\n" 1426 " number = {1},\n" 1427 " pages = {C1-C24},\n" 1428 " doi = {10.1137/21M140078X},\n" 1429 " year = {2022}\n}\n", 1430 &cite)); 1431 #if defined(TSADJOINT_STAGE) 1432 PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage)); 1433 PetscCall(PetscLogStagePush(adjoint_stage)); 1434 #endif 1435 PetscCall(TSAdjointSetUp(ts)); 1436 1437 /* reset time step and iteration counters */ 1438 ts->adjoint_steps = 0; 1439 ts->ksp_its = 0; 1440 ts->snes_its = 0; 1441 ts->num_snes_failures = 0; 1442 ts->reject = 0; 1443 ts->reason = TS_CONVERGED_ITERATING; 1444 1445 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1446 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1447 1448 while (!ts->reason) { 1449 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1450 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1451 PetscCall(TSAdjointEventHandler(ts)); 1452 PetscCall(TSAdjointStep(ts)); 1453 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts)); 1454 } 1455 if (!ts->steps) { 1456 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1457 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1458 } 1459 ts->solvetime = ts->ptime; 1460 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view")); 1461 PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution")); 1462 ts->adjoint_max_steps = 0; 1463 #if defined(TSADJOINT_STAGE) 1464 PetscCall(PetscLogStagePop()); 1465 #endif 1466 PetscFunctionReturn(0); 1467 } 1468 1469 /*@C 1470 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()` 1471 1472 Collective on ts 1473 
1474 Input Parameters: 1475 + ts - time stepping context obtained from `TSCreate()` 1476 . step - step number that has just completed 1477 . ptime - model time of the state 1478 . u - state at the current model time 1479 . numcost - number of cost functions (dimension of lambda or mu) 1480 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1481 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1482 1483 Level: developer 1484 1485 Note: 1486 `TSAdjointMonitor()` is typically used automatically within the time stepping implementations. 1487 Users would almost never call this routine directly. 1488 1489 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()` 1490 @*/ 1491 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu) 1492 { 1493 PetscInt i, n = ts->numberadjointmonitors; 1494 1495 PetscFunctionBegin; 1496 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1497 PetscValidHeaderSpecific(u, VEC_CLASSID, 4); 1498 PetscCall(VecLockReadPush(u)); 1499 for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i])); 1500 PetscCall(VecLockReadPop(u)); 1501 PetscFunctionReturn(0); 1502 } 1503 1504 /*@ 1505 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1506 1507 Collective on ts 1508 1509 Input Parameter: 1510 . ts - time stepping context 1511 1512 Level: advanced 1513 1514 Notes: 1515 This function cannot be called until `TSAdjointStep()` has been completed. 
1516 1517 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointStep()` 1518 @*/ 1519 PetscErrorCode TSAdjointCostIntegral(TS ts) 1520 { 1521 PetscFunctionBegin; 1522 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1523 PetscUseTypeMethod(ts, adjointintegral); 1524 PetscFunctionReturn(0); 1525 } 1526 1527 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1528 1529 /*@ 1530 TSForwardSetUp - Sets up the internal data structures for the later use 1531 of forward sensitivity analysis 1532 1533 Collective on ts 1534 1535 Input Parameter: 1536 . ts - the `TS` context obtained from `TSCreate()` 1537 1538 Level: advanced 1539 1540 .seealso: [](chapter_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()` 1541 @*/ 1542 PetscErrorCode TSForwardSetUp(TS ts) 1543 { 1544 PetscFunctionBegin; 1545 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1546 if (ts->forwardsetupcalled) PetscFunctionReturn(0); 1547 PetscTryTypeMethod(ts, forwardsetup); 1548 PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col)); 1549 ts->forwardsetupcalled = PETSC_TRUE; 1550 PetscFunctionReturn(0); 1551 } 1552 1553 /*@ 1554 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1555 1556 Collective on ts 1557 1558 Input Parameter: 1559 . 
ts - the `TS` context obtained from `TSCreate()` 1560 1561 Level: advanced 1562 1563 .seealso: [](chapter_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()` 1564 @*/ 1565 PetscErrorCode TSForwardReset(TS ts) 1566 { 1567 TS quadts = ts->quadraturets; 1568 1569 PetscFunctionBegin; 1570 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1571 PetscTryTypeMethod(ts, forwardreset); 1572 PetscCall(MatDestroy(&ts->mat_sensip)); 1573 if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip)); 1574 PetscCall(VecDestroy(&ts->vec_sensip_col)); 1575 ts->forward_solve = PETSC_FALSE; 1576 ts->forwardsetupcalled = PETSC_FALSE; 1577 PetscFunctionReturn(0); 1578 } 1579 1580 /*@ 1581 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1582 1583 Input Parameters: 1584 + ts - the `TS` context obtained from `TSCreate()` 1585 . numfwdint - number of integrals 1586 - vp - the vectors containing the gradients for each integral w.r.t. parameters 1587 1588 Level: deprecated 1589 1590 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1591 @*/ 1592 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp) 1593 { 1594 PetscFunctionBegin; 1595 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1596 PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1597 if (!ts->numcost) ts->numcost = numfwdint; 1598 1599 ts->vecs_integral_sensip = vp; 1600 PetscFunctionReturn(0); 1601 } 1602 1603 /*@ 1604 TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term. 1605 1606 Input Parameter: 1607 . ts - the `TS` context obtained from `TSCreate()` 1608 1609 Output Parameter: 1610 . vp - the vectors containing the gradients for each integral w.r.t. 
parameters 1611 1612 Level: deprecated 1613 1614 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1615 @*/ 1616 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp) 1617 { 1618 PetscFunctionBegin; 1619 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1620 PetscValidPointer(vp, 3); 1621 if (numfwdint) *numfwdint = ts->numcost; 1622 if (vp) *vp = ts->vecs_integral_sensip; 1623 PetscFunctionReturn(0); 1624 } 1625 1626 /*@ 1627 TSForwardStep - Compute the forward sensitivity for one time step. 1628 1629 Collective on ts 1630 1631 Input Parameter: 1632 . ts - time stepping context 1633 1634 Level: advanced 1635 1636 Notes: 1637 This function cannot be called until `TSStep()` has been completed. 1638 1639 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1640 @*/ 1641 PetscErrorCode TSForwardStep(TS ts) 1642 { 1643 PetscFunctionBegin; 1644 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1645 PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0)); 1646 PetscUseTypeMethod(ts, forwardstep); 1647 PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0)); 1648 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]); 1649 PetscFunctionReturn(0); 1650 } 1651 1652 /*@ 1653 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1654 1655 Logically Collective on ts 1656 1657 Input Parameters: 1658 + ts - the `TS` context obtained from `TSCreate()` 1659 . 
nump - number of parameters 1660 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1661 1662 Level: beginner 1663 1664 Notes: 1665 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1666 This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically. 1667 You must call this function before `TSSolve()`. 1668 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 1669 1670 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1671 @*/ 1672 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat) 1673 { 1674 PetscFunctionBegin; 1675 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1676 PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3); 1677 ts->forward_solve = PETSC_TRUE; 1678 if (nump == PETSC_DEFAULT) { 1679 PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters)); 1680 } else ts->num_parameters = nump; 1681 PetscCall(PetscObjectReference((PetscObject)Smat)); 1682 PetscCall(MatDestroy(&ts->mat_sensip)); 1683 ts->mat_sensip = Smat; 1684 PetscFunctionReturn(0); 1685 } 1686 1687 /*@ 1688 TSForwardGetSensitivities - Returns the trajectory sensitivities 1689 1690 Not Collective, but Smat returned is parallel if ts is parallel 1691 1692 Output Parameters: 1693 + ts - the `TS` context obtained from `TSCreate()` 1694 . 
nump - number of parameters 1695 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1696 1697 Level: intermediate 1698 1699 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1700 @*/ 1701 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat) 1702 { 1703 PetscFunctionBegin; 1704 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1705 if (nump) *nump = ts->num_parameters; 1706 if (Smat) *Smat = ts->mat_sensip; 1707 PetscFunctionReturn(0); 1708 } 1709 1710 /*@ 1711 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1712 1713 Collective on ts 1714 1715 Input Parameter: 1716 . ts - time stepping context 1717 1718 Level: advanced 1719 1720 Note: 1721 This function cannot be called until `TSStep()` has been completed. 1722 1723 .seealso: [](chapter_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()` 1724 @*/ 1725 PetscErrorCode TSForwardCostIntegral(TS ts) 1726 { 1727 PetscFunctionBegin; 1728 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1729 PetscUseTypeMethod(ts, forwardintegral); 1730 PetscFunctionReturn(0); 1731 } 1732 1733 /*@ 1734 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1735 1736 Collective on ts 1737 1738 Input Parameters: 1739 + ts - the `TS` context obtained from `TSCreate()` 1740 - didp - parametric sensitivities of the initial condition 1741 1742 Level: intermediate 1743 1744 Notes: 1745 `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way. 1746 This function is used to set initial values for tangent linear variables. 
1747 1748 .seealso: [](chapter_ts), `TS`, `TSForwardSetSensitivities()` 1749 @*/ 1750 PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp) 1751 { 1752 PetscFunctionBegin; 1753 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1754 PetscValidHeaderSpecific(didp, MAT_CLASSID, 2); 1755 if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp)); 1756 PetscFunctionReturn(0); 1757 } 1758 1759 /*@ 1760 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1761 1762 Input Parameter: 1763 . ts - the `TS` context obtained from `TSCreate()` 1764 1765 Output Parameters: 1766 + ns - number of stages 1767 - S - tangent linear sensitivities at the intermediate stages 1768 1769 Level: advanced 1770 1771 .seealso: `TS` 1772 @*/ 1773 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S) 1774 { 1775 PetscFunctionBegin; 1776 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1777 1778 if (!ts->ops->getstages) *S = NULL; 1779 else PetscUseTypeMethod(ts, forwardgetstages, ns, S); 1780 PetscFunctionReturn(0); 1781 } 1782 1783 /*@ 1784 TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time 1785 1786 Input Parameters: 1787 + ts - the `TS` context obtained from `TSCreate()` 1788 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1789 1790 Output Parameters: 1791 . 
quadts - the child `TS` context 1792 1793 Level: intermediate 1794 1795 .seealso: [](chapter_ts), `TSGetQuadratureTS()` 1796 @*/ 1797 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts) 1798 { 1799 char prefix[128]; 1800 1801 PetscFunctionBegin; 1802 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1803 PetscValidPointer(quadts, 3); 1804 PetscCall(TSDestroy(&ts->quadraturets)); 1805 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets)); 1806 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1)); 1807 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : "")); 1808 PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix)); 1809 *quadts = ts->quadraturets; 1810 1811 if (ts->numcost) { 1812 PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol)); 1813 } else { 1814 PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol)); 1815 } 1816 ts->costintegralfwd = fwd; 1817 PetscFunctionReturn(0); 1818 } 1819 1820 /*@ 1821 TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time 1822 1823 Input Parameter: 1824 . 
ts - the `TS` context obtained from `TSCreate()` 1825 1826 Output Parameters: 1827 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1828 - quadts - the child `TS` context 1829 1830 Level: intermediate 1831 1832 .seealso: [](chapter_ts), `TSCreateQuadratureTS()` 1833 @*/ 1834 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts) 1835 { 1836 PetscFunctionBegin; 1837 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1838 if (fwd) *fwd = ts->costintegralfwd; 1839 if (quadts) *quadts = ts->quadraturets; 1840 PetscFunctionReturn(0); 1841 } 1842 1843 /*@ 1844 TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS` 1845 1846 Collective on ts 1847 1848 Input Parameters: 1849 + ts - the `TS` context obtained from `TSCreate()` 1850 - x - state vector 1851 1852 Output Parameters: 1853 + J - Jacobian matrix 1854 - Jpre - preconditioning matrix for J (may be same as J) 1855 1856 Level: developer 1857 1858 Note: 1859 Uses finite differencing when `TS` Jacobian is not available. 1860 1861 .seealso: `SNES`, `TS`, `SNESSetJacobian()`, TSSetRHSJacobian()`, `TSSetIJacobian()` 1862 @*/ 1863 PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre) 1864 { 1865 SNES snes = ts->snes; 1866 PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL; 1867 1868 PetscFunctionBegin; 1869 /* 1870 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 1871 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 1872 explicit methods. Instead, we check the Jacobian compute function directly to determin if FD 1873 coloring is used. 
1874 */ 1875 PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL)); 1876 if (jac == SNESComputeJacobianDefaultColor) { 1877 Vec f; 1878 PetscCall(SNESSetSolution(snes, x)); 1879 PetscCall(SNESGetFunction(snes, &f, NULL, NULL)); 1880 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 1881 PetscCall(SNESComputeFunction(snes, x, f)); 1882 } 1883 PetscCall(SNESComputeJacobian(snes, x, J, Jpre)); 1884 PetscFunctionReturn(0); 1885 } 1886