1 #include <petsc/private/tsimpl.h> /*I "petscts.h" I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective 14 15 Input Parameters: 16 + ts - `TS` context obtained from `TSCreate()` 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Calling sequence of `func`: 22 $ PetscErrorCode func(TS ts, PetscReal t, Vec y, Mat A, void *ctx) 23 + t - current timestep 24 . U - input vector (current ODE solution) 25 . A - output matrix 26 - ctx - [optional] user-defined function context 27 28 Level: intermediate 29 30 Note: 31 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 32 33 .seealso: [](ch_ts), `TS`, `TSGetRHSJacobianP()` 34 @*/ 35 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx) 36 { 37 PetscFunctionBegin; 38 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 39 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 40 41 ts->rhsjacobianp = func; 42 ts->rhsjacobianpctx = ctx; 43 if (Amat) { 44 PetscCall(PetscObjectReference((PetscObject)Amat)); 45 PetscCall(MatDestroy(&ts->Jacprhs)); 46 ts->Jacprhs = Amat; 47 } 48 PetscFunctionReturn(PETSC_SUCCESS); 49 } 50 51 /*@C 52 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 53 54 Logically Collective 55 56 Input Parameter: 57 . ts - `TS` context obtained from `TSCreate()` 58 59 Output Parameters: 60 + Amat - JacobianP matrix 61 . 
func - function 62 - ctx - [optional] user-defined function context 63 64 Calling sequence of `func`: 65 $ PetscErrorCode func(TS ts, PetscReal t, Vec y, Mat A, void *ctx) 66 + t - current timestep 67 . U - input vector (current ODE solution) 68 . A - output matrix 69 - ctx - [optional] user-defined function context 70 71 Level: intermediate 72 73 Note: 74 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 75 76 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSGetRHSJacobianP()` 77 @*/ 78 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS, PetscReal, Vec, Mat, void *), void **ctx) 79 { 80 PetscFunctionBegin; 81 if (func) *func = ts->rhsjacobianp; 82 if (ctx) *ctx = ts->rhsjacobianpctx; 83 if (Amat) *Amat = ts->Jacprhs; 84 PetscFunctionReturn(PETSC_SUCCESS); 85 } 86 87 /*@C 88 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 89 90 Collective 91 92 Input Parameters: 93 + ts - The `TS` context obtained from `TSCreate()` 94 . t - the time 95 - U - the solution at which to compute the Jacobian 96 97 Output Parameter: 98 . 
Amat - the computed Jacobian 99 100 Level: developer 101 102 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 103 @*/ 104 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat) 105 { 106 PetscFunctionBegin; 107 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 108 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 109 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 110 111 if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 112 else { 113 PetscBool assembled; 114 PetscCall(MatZeroEntries(Amat)); 115 PetscCall(MatAssembled(Amat, &assembled)); 116 if (!assembled) { 117 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 118 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 119 } 120 } 121 PetscFunctionReturn(PETSC_SUCCESS); 122 } 123 124 /*@C 125 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 126 127 Logically Collective 128 129 Input Parameters: 130 + ts - `TS` context obtained from `TSCreate()` 131 . Amat - JacobianP matrix 132 . func - function 133 - ctx - [optional] user-defined function context 134 135 Calling sequence of `func`: 136 $ PetscErrorCode func(TS ts, PetscReal t, Vec y, Mat A, void *ctx) 137 + t - current timestep 138 . U - input vector (current ODE solution) 139 . Udot - time derivative of state vector 140 . shift - shift to apply, see note below 141 . 
A - output matrix 142 - ctx - [optional] user-defined function context 143 144 Level: intermediate 145 146 Note: 147 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 148 149 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 150 @*/ 151 PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Vec, PetscReal, Mat, void *), void *ctx) 152 { 153 PetscFunctionBegin; 154 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 155 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 156 157 ts->ijacobianp = func; 158 ts->ijacobianpctx = ctx; 159 if (Amat) { 160 PetscCall(PetscObjectReference((PetscObject)Amat)); 161 PetscCall(MatDestroy(&ts->Jacp)); 162 ts->Jacp = Amat; 163 } 164 PetscFunctionReturn(PETSC_SUCCESS); 165 } 166 167 /*@C 168 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 169 170 Collective 171 172 Input Parameters: 173 + ts - the `TS` context 174 . t - current timestep 175 . U - state vector 176 . Udot - time derivative of state vector 177 . shift - shift to apply, see note below 178 - imex - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate 179 180 Output Parameter: 181 . 
Amat - Jacobian matrix 182 183 Level: developer 184 185 .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()` 186 @*/ 187 PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex) 188 { 189 PetscFunctionBegin; 190 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 191 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 192 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 193 PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4); 194 195 PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0)); 196 if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx)); 197 if (imex) { 198 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 199 PetscBool assembled; 200 PetscCall(MatZeroEntries(Amat)); 201 PetscCall(MatAssembled(Amat, &assembled)); 202 if (!assembled) { 203 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 204 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 205 } 206 } 207 } else { 208 if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs)); 209 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 210 PetscCall(MatScale(Amat, -1)); 211 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 212 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 213 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 214 PetscCall(MatZeroEntries(Amat)); 215 } 216 PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy)); 217 } 218 } 219 PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0)); 220 PetscFunctionReturn(PETSC_SUCCESS); 221 } 222 223 /*@C 224 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 225 226 Logically Collective 227 228 Input Parameters: 229 + ts - the `TS` context obtained from `TSCreate()` 230 . numcost - number of gradients to be computed, this is the number of cost functions 231 . 
costintegral - vector that stores the integral values 232 . rf - routine for evaluating the integrand function 233 . drduf - function that computes the gradients of the r's with respect to u 234 . drdpf - function that computes the gradients of the r's with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`) 235 . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 236 - ctx - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`) 237 238 Calling sequence of `rf`: 239 $ PetscErrorCode rf(TS ts, PetscReal t, Vec U, Vec F, oid *ctx) 240 241 Calling sequence of `drduf`: 242 $ PetscErroCode drduf(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx) 243 244 Calling sequence of `drdpf`: 245 $ PetscErroCode drdpf(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx) 246 247 Level: deprecated 248 249 Note: 250 For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions 251 252 .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()` 253 @*/ 254 PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx) 255 { 256 PetscFunctionBegin; 257 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 258 if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3); 259 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()"); 260 if (!ts->numcost) ts->numcost = numcost; 261 262 if (costintegral) { 263 
PetscCall(PetscObjectReference((PetscObject)costintegral)); 264 PetscCall(VecDestroy(&ts->vec_costintegral)); 265 ts->vec_costintegral = costintegral; 266 } else { 267 if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */ 268 PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral)); 269 } else { 270 PetscCall(VecSet(ts->vec_costintegral, 0.0)); 271 } 272 } 273 if (!ts->vec_costintegrand) { 274 PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand)); 275 } else { 276 PetscCall(VecSet(ts->vec_costintegrand, 0.0)); 277 } 278 ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */ 279 ts->costintegrand = rf; 280 ts->costintegrandctx = ctx; 281 ts->drdufunction = drduf; 282 ts->drdpfunction = drdpf; 283 PetscFunctionReturn(PETSC_SUCCESS); 284 } 285 286 /*@C 287 TSGetCostIntegral - Returns the values of the integral term in the cost functions. 288 It is valid to call the routine after a backward run. 289 290 Not Collective 291 292 Input Parameter: 293 . ts - the `TS` context obtained from `TSCreate()` 294 295 Output Parameter: 296 . v - the vector containing the integrals for each cost function 297 298 Level: intermediate 299 300 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, ``TSSetCostIntegrand()` 301 @*/ 302 PetscErrorCode TSGetCostIntegral(TS ts, Vec *v) 303 { 304 TS quadts; 305 306 PetscFunctionBegin; 307 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 308 PetscValidPointer(v, 2); 309 PetscCall(TSGetQuadratureTS(ts, NULL, &quadts)); 310 *v = quadts->vec_sol; 311 PetscFunctionReturn(PETSC_SUCCESS); 312 } 313 314 /*@C 315 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 316 317 Input Parameters: 318 + ts - the `TS` context 319 . t - current time 320 - U - state vector, i.e. current solution 321 322 Output Parameter: 323 . 
Q - vector of size numcost to hold the outputs 324 325 Level: deprecated 326 327 Note: 328 Most users should not need to explicitly call this routine, as it 329 is used internally within the sensitivity analysis context. 330 331 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()` 332 @*/ 333 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q) 334 { 335 PetscFunctionBegin; 336 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 337 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 338 PetscValidHeaderSpecific(Q, VEC_CLASSID, 4); 339 340 PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0)); 341 if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx)); 342 else PetscCall(VecZeroEntries(Q)); 343 PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0)); 344 PetscFunctionReturn(PETSC_SUCCESS); 345 } 346 347 /*@C 348 TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 349 350 Level: deprecated 351 352 @*/ 353 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 354 { 355 PetscFunctionBegin; 356 if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS); 357 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 358 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 359 360 PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 361 PetscFunctionReturn(PETSC_SUCCESS); 362 } 363 364 /*@C 365 TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 366 367 Level: deprecated 368 369 @*/ 370 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 371 { 372 PetscFunctionBegin; 373 if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS); 374 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 375 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 376 377 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, 
U, DRDP, ts->costintegrandctx)); 378 PetscFunctionReturn(PETSC_SUCCESS); 379 } 380 381 /*@C 382 TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 383 384 Logically Collective 385 386 Input Parameters: 387 + ts - `TS` context obtained from `TSCreate()` 388 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 389 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 390 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 391 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 392 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 393 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 394 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 395 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP 396 397 Calling sequence of `ihessianproductfunc`: 398 $ PetscErrorCode ihessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx); 399 + t - current timestep 400 . U - input vector (current ODE solution) 401 . Vl - an array of input vectors to be left-multiplied with the Hessian 402 . Vr - input vector to be right-multiplied with the Hessian 403 . VHV - an array of output vectors for vector-Hessian-vector product 404 - ctx - [optional] user-defined function context 405 406 Level: intermediate 407 408 Notes: 409 The first Hessian function and the working array are required. 410 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 411 $ Vl_n^T*F_UP*Vr 412 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 
413 Each entry of F_UP corresponds to the derivative 414 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 415 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being 416 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]} 417 If the cost function is a scalar, there will be only one vector in Vl and VHV. 418 419 .seealso: [](ch_ts), `TS` 420 @*/ 421 PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx) 422 { 423 PetscFunctionBegin; 424 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 425 PetscValidPointer(ihp1, 2); 426 427 ts->ihessianproductctx = ctx; 428 if (ihp1) ts->vecs_fuu = ihp1; 429 if (ihp2) ts->vecs_fup = ihp2; 430 if (ihp3) ts->vecs_fpu = ihp3; 431 if (ihp4) ts->vecs_fpp = ihp4; 432 ts->ihessianproduct_fuu = ihessianproductfunc1; 433 ts->ihessianproduct_fup = ihessianproductfunc2; 434 ts->ihessianproduct_fpu = ihessianproductfunc3; 435 ts->ihessianproduct_fpp = ihessianproductfunc4; 436 PetscFunctionReturn(PETSC_SUCCESS); 437 } 438 439 /*@C 440 TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. 441 442 Collective 443 444 Input Parameters: 445 + ts - The `TS` context obtained from `TSCreate()` 446 . t - the time 447 . U - the solution at which to compute the Hessian product 448 . Vl - the array of input vectors to be multiplied with the Hessian from the left 449 - Vr - the input vector to be multiplied with the Hessian from the right 450 451 Output Parameter: 452 . 
VHV - the array of output vectors that store the Hessian product 453 454 Level: developer 455 456 Note: 457 `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation, 458 so most users would not generally call this routine themselves. 459 460 .seealso: [](ch_ts), `TSSetIHessianProduct()` 461 @*/ 462 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 463 { 464 PetscFunctionBegin; 465 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 466 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 467 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 468 469 if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 470 471 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 472 if (ts->rhshessianproduct_guu) { 473 PetscInt nadj; 474 PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV)); 475 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 476 } 477 PetscFunctionReturn(PETSC_SUCCESS); 478 } 479 480 /*@C 481 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 482 483 Collective 484 485 Input Parameters: 486 + ts - The `TS` context obtained from `TSCreate()` 487 . t - the time 488 . U - the solution at which to compute the Hessian product 489 . Vl - the array of input vectors to be multiplied with the Hessian from the left 490 - Vr - the input vector to be multiplied with the Hessian from the right 491 492 Output Parameter: 493 . VHV - the array of output vectors that store the Hessian product 494 495 Level: developer 496 497 Note: 498 `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation, 499 so most users would not generally call this routine themselves. 
500 501 .seealso: [](ch_ts), `TSSetIHessianProduct()` 502 @*/ 503 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 504 { 505 PetscFunctionBegin; 506 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 507 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 508 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 509 510 if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 511 512 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 513 if (ts->rhshessianproduct_gup) { 514 PetscInt nadj; 515 PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV)); 516 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 517 } 518 PetscFunctionReturn(PETSC_SUCCESS); 519 } 520 521 /*@C 522 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 523 524 Collective 525 526 Input Parameters: 527 + ts - The `TS` context obtained from `TSCreate()` 528 . t - the time 529 . U - the solution at which to compute the Hessian product 530 . Vl - the array of input vectors to be multiplied with the Hessian from the left 531 - Vr - the input vector to be multiplied with the Hessian from the right 532 533 Output Parameter: 534 . VHV - the array of output vectors that store the Hessian product 535 536 Level: developer 537 538 Note: 539 `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation, 540 so most users would not generally call this routine themselves. 
541 542 .seealso: [](ch_ts), `TSSetIHessianProduct()` 543 @*/ 544 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 545 { 546 PetscFunctionBegin; 547 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 548 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 549 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 550 551 if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 552 553 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 554 if (ts->rhshessianproduct_gpu) { 555 PetscInt nadj; 556 PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV)); 557 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 558 } 559 PetscFunctionReturn(PETSC_SUCCESS); 560 } 561 562 /*@C 563 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 564 565 Collective 566 567 Input Parameters: 568 + ts - The `TS` context obtained from `TSCreate()` 569 . t - the time 570 . U - the solution at which to compute the Hessian product 571 . Vl - the array of input vectors to be multiplied with the Hessian from the left 572 - Vr - the input vector to be multiplied with the Hessian from the right 573 574 Output Parameter: 575 . VHV - the array of output vectors that store the Hessian product 576 577 Level: developer 578 579 Note: 580 `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation, 581 so most users would not generally call this routine themselves. 
582 583 .seealso: [](ch_ts), `TSSetIHessianProduct()` 584 @*/ 585 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 586 { 587 PetscFunctionBegin; 588 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 589 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 590 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 591 592 if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 593 594 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 595 if (ts->rhshessianproduct_gpp) { 596 PetscInt nadj; 597 PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV)); 598 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 599 } 600 PetscFunctionReturn(PETSC_SUCCESS); 601 } 602 603 /*@C 604 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 605 606 Logically Collective 607 608 Input Parameters: 609 + ts - `TS` context obtained from `TSCreate()` 610 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 611 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 612 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 613 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 614 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 615 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 616 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 617 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP 618 619 Calling sequence of `ihessianproductfunc`: 620 $ PetscErrorCode rhshessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx); 621 + t - current timestep 622 . U - input vector (current ODE solution) 623 . Vl - an array of input vectors to be left-multiplied with the Hessian 624 . Vr - input vector to be right-multiplied with the Hessian 625 . VHV - an array of output vectors for vector-Hessian-vector product 626 - ctx - [optional] user-defined function context 627 628 Level: intermediate 629 630 Notes: 631 The first Hessian function and the working array are required. 632 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 633 $ Vl_n^T*G_UP*Vr 634 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 635 Each entry of G_UP corresponds to the derivative 636 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 637 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 638 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 639 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
640 641 .seealso: `TS`, `TSAdjoint` 642 @*/ 643 PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx) 644 { 645 PetscFunctionBegin; 646 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 647 PetscValidPointer(rhshp1, 2); 648 649 ts->rhshessianproductctx = ctx; 650 if (rhshp1) ts->vecs_guu = rhshp1; 651 if (rhshp2) ts->vecs_gup = rhshp2; 652 if (rhshp3) ts->vecs_gpu = rhshp3; 653 if (rhshp4) ts->vecs_gpp = rhshp4; 654 ts->rhshessianproduct_guu = rhshessianproductfunc1; 655 ts->rhshessianproduct_gup = rhshessianproductfunc2; 656 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 657 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 658 PetscFunctionReturn(PETSC_SUCCESS); 659 } 660 661 /*@C 662 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 663 664 Collective 665 666 Input Parameters: 667 + ts - The `TS` context obtained from `TSCreate()` 668 . t - the time 669 . U - the solution at which to compute the Hessian product 670 . Vl - the array of input vectors to be multiplied with the Hessian from the left 671 - Vr - the input vector to be multiplied with the Hessian from the right 672 673 Output Parameter: 674 . VHV - the array of output vectors that store the Hessian product 675 676 Level: developer 677 678 Note: 679 `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation, 680 so most users would not generally call this routine themselves. 
681 682 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` 683 @*/ 684 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 685 { 686 PetscFunctionBegin; 687 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 688 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 689 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 690 691 PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 692 PetscFunctionReturn(PETSC_SUCCESS); 693 } 694 695 /*@C 696 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 697 698 Collective 699 700 Input Parameters: 701 + ts - The `TS` context obtained from `TSCreate()` 702 . t - the time 703 . U - the solution at which to compute the Hessian product 704 . Vl - the array of input vectors to be multiplied with the Hessian from the left 705 - Vr - the input vector to be multiplied with the Hessian from the right 706 707 Output Parameter: 708 . VHV - the array of output vectors that store the Hessian product 709 710 Level: developer 711 712 Note: 713 `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation, 714 so most users would not generally call this routine themselves. 
715 716 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` 717 @*/ 718 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 719 { 720 PetscFunctionBegin; 721 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 722 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 723 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 724 725 PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 726 PetscFunctionReturn(PETSC_SUCCESS); 727 } 728 729 /*@C 730 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 731 732 Collective 733 734 Input Parameters: 735 + ts - The `TS` context obtained from `TSCreate()` 736 . t - the time 737 . U - the solution at which to compute the Hessian product 738 . Vl - the array of input vectors to be multiplied with the Hessian from the left 739 - Vr - the input vector to be multiplied with the Hessian from the right 740 741 Output Parameter: 742 . VHV - the array of output vectors that store the Hessian product 743 744 Level: developer 745 746 Note: 747 `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation, 748 so most users would not generally call this routine themselves. 
749 750 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 751 @*/ 752 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 753 { 754 PetscFunctionBegin; 755 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 756 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 757 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 758 759 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 760 PetscFunctionReturn(PETSC_SUCCESS); 761 } 762 763 /*@C 764 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 765 766 Collective 767 768 Input Parameters: 769 + ts - The `TS` context obtained from `TSCreate()` 770 . t - the time 771 . U - the solution at which to compute the Hessian product 772 . Vl - the array of input vectors to be multiplied with the Hessian from the left 773 - Vr - the input vector to be multiplied with the Hessian from the right 774 775 Output Parameter: 776 . VHV - the array of output vectors that store the Hessian product 777 778 Level: developer 779 780 Note: 781 `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation, 782 so most users would not generally call this routine themselves. 
783 784 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 785 @*/ 786 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 787 { 788 PetscFunctionBegin; 789 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 790 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 791 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 792 793 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 794 PetscFunctionReturn(PETSC_SUCCESS); 795 } 796 797 /* --------------------------- Adjoint sensitivity ---------------------------*/ 798 799 /*@ 800 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 801 for use by the `TS` adjoint routines. 802 803 Logically Collective 804 805 Input Parameters: 806 + ts - the `TS` context obtained from `TSCreate()` 807 . numcost - number of gradients to be computed, this is the number of cost functions 808 . 
lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
- mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

  Level: beginner

  Notes:
  The entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime mu_i = df/dp|finaltime

  After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities

.seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSGetCostGradients()`
@*/
PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(lambda, 3);
  /* NOTE(review): mu is stored without validation; presumably it may be NULL when no parameter sensitivities are requested -- confirm against TSAdjointSolve() */
  ts->vecs_sensi  = lambda;
  ts->vecs_sensip = mu;
  /* numcost may have been fixed earlier (e.g. by TSSetCostIntegrand()); it must agree with that value */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost = numcost;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()`

  Not Collective, but the vectors returned are parallel if `TS` is parallel

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ numcost - size of returned arrays
. 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 843 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 844 845 Level: intermediate 846 847 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()` 848 @*/ 849 PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu) 850 { 851 PetscFunctionBegin; 852 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 853 if (numcost) *numcost = ts->numcost; 854 if (lambda) *lambda = ts->vecs_sensi; 855 if (mu) *mu = ts->vecs_sensip; 856 PetscFunctionReturn(PETSC_SUCCESS); 857 } 858 859 /*@ 860 TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters 861 for use by the `TS` adjoint routines. 862 863 Logically Collective 864 865 Input Parameters: 866 + ts - the `TS` context obtained from `TSCreate()` 867 . numcost - number of cost functions 868 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 869 . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 870 - dir - the direction vector that are multiplied with the Hessian of the cost functions 871 872 Level: beginner 873 874 Notes: 875 Hessian of the cost function is completely different from Hessian of the ODE/DAE system 876 877 For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`. 878 879 After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver. 
880 881 Passing `NULL` for `lambda2` disables the second-order calculation. 882 883 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()` 884 @*/ 885 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir) 886 { 887 PetscFunctionBegin; 888 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 889 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 890 ts->numcost = numcost; 891 ts->vecs_sensi2 = lambda2; 892 ts->vecs_sensi2p = mu2; 893 ts->vec_dir = dir; 894 PetscFunctionReturn(PETSC_SUCCESS); 895 } 896 897 /*@ 898 TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()` 899 900 Not Collective, but vectors returned are parallel if `TS` is parallel 901 902 Input Parameter: 903 . ts - the `TS` context obtained from `TSCreate()` 904 905 Output Parameters: 906 + numcost - number of cost functions 907 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 908 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 909 - dir - the direction vector that are multiplied with the Hessian of the cost functions 910 911 Level: intermediate 912 913 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()` 914 @*/ 915 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir) 916 { 917 PetscFunctionBegin; 918 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 919 if (numcost) *numcost = ts->numcost; 920 if (lambda2) *lambda2 = ts->vecs_sensi2; 921 if (mu2) *mu2 = ts->vecs_sensi2p; 922 if (dir) *dir = ts->vec_dir; 923 PetscFunctionReturn(PETSC_SUCCESS); 924 } 925 926 /*@ 927 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 928 929 Logically Collective 930 931 Input Parameters: 932 + ts - the `TS` context obtained from `TSCreate()` 933 - didp - the derivative of initial values w.r.t. parameters 934 935 Level: intermediate 936 937 Notes: 938 When computing sensitivies w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically. 939 `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver. 940 941 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 942 @*/ 943 PetscErrorCode TSAdjointSetForward(TS ts, Mat didp) 944 { 945 Mat A; 946 Vec sp; 947 PetscScalar *xarr; 948 PetscInt lsize; 949 950 PetscFunctionBegin; 951 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 952 PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first"); 953 PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 954 /* create a single-column dense matrix */ 955 PetscCall(VecGetLocalSize(ts->vec_sol, &lsize)); 956 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A)); 957 958 PetscCall(VecDuplicate(ts->vec_sol, &sp)); 959 PetscCall(MatDenseGetColumn(A, 0, &xarr)); 960 PetscCall(VecPlaceArray(sp, xarr)); 961 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 962 if (didp) { 963 PetscCall(MatMult(didp, ts->vec_dir, sp)); 964 PetscCall(VecScale(sp, 2.)); 965 } else { 966 PetscCall(VecZeroEntries(sp)); 967 } 968 } else { /* tangent linear variable initialized as dir */ 969 PetscCall(VecCopy(ts->vec_dir, sp)); 970 } 971 PetscCall(VecResetArray(sp)); 972 PetscCall(MatDenseRestoreColumn(A, &xarr)); 973 PetscCall(VecDestroy(&sp)); 974 975 PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */ 976 977 PetscCall(MatDestroy(&A)); 978 PetscFunctionReturn(PETSC_SUCCESS); 979 } 980 981 /*@ 982 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 983 984 Logically Collective 985 986 Input Parameter: 987 . ts - the `TS` context obtained from `TSCreate()` 988 989 Level: intermediate 990 991 .seealso: [](ch_ts), `TSAdjointSetForward()` 992 @*/ 993 PetscErrorCode TSAdjointResetForward(TS ts) 994 { 995 PetscFunctionBegin; 996 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 997 PetscCall(TSForwardReset(ts)); 998 PetscFunctionReturn(PETSC_SUCCESS); 999 } 1000 1001 /*@ 1002 TSAdjointSetUp - Sets up the internal data structures for the later use 1003 of an adjoint solver 1004 1005 Collective 1006 1007 Input Parameter: 1008 . 
ts - the `TS` context obtained from `TSCreate()` 1009 1010 Level: advanced 1011 1012 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 1013 @*/ 1014 PetscErrorCode TSAdjointSetUp(TS ts) 1015 { 1016 TSTrajectory tj; 1017 PetscBool match; 1018 1019 PetscFunctionBegin; 1020 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1021 if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1022 PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first"); 1023 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 1024 PetscCall(TSGetTrajectory(ts, &tj)); 1025 PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match)); 1026 if (match) { 1027 PetscBool solution_only; 1028 PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only)); 1029 PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 1030 } 1031 PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */ 1032 1033 if (ts->quadraturets) { /* if there is integral in the cost function */ 1034 PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col)); 1035 if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col)); 1036 } 1037 1038 PetscTryTypeMethod(ts, adjointsetup); 1039 ts->adjointsetupcalled = PETSC_TRUE; 1040 PetscFunctionReturn(PETSC_SUCCESS); 1041 } 1042 1043 /*@ 1044 TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s. 1045 1046 Collective 1047 1048 Input Parameter: 1049 . 
ts - the `TS` context obtained from `TSCreate()` 1050 1051 Level: beginner 1052 1053 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 1054 @*/ 1055 PetscErrorCode TSAdjointReset(TS ts) 1056 { 1057 PetscFunctionBegin; 1058 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1059 PetscTryTypeMethod(ts, adjointreset); 1060 if (ts->quadraturets) { /* if there is integral in the cost function */ 1061 PetscCall(VecDestroy(&ts->vec_drdu_col)); 1062 if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col)); 1063 } 1064 ts->vecs_sensi = NULL; 1065 ts->vecs_sensip = NULL; 1066 ts->vecs_sensi2 = NULL; 1067 ts->vecs_sensi2p = NULL; 1068 ts->vec_dir = NULL; 1069 ts->adjointsetupcalled = PETSC_FALSE; 1070 PetscFunctionReturn(PETSC_SUCCESS); 1071 } 1072 1073 /*@ 1074 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1075 1076 Logically Collective 1077 1078 Input Parameters: 1079 + ts - the `TS` context obtained from `TSCreate()` 1080 - steps - number of steps to use 1081 1082 Level: intermediate 1083 1084 Notes: 1085 Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. 
One can call this
  so as to integrate back to less than the original timestep

.seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()`
@*/
PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidLogicalCollectiveInt(ts, steps, 2);
  PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps");
  /* cannot step back further than the number of forward steps actually taken */
  PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()`

  Level: deprecated

.seealso: `TSSetRHSJacobianP()`
@*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* take a reference before releasing any previously stored matrix */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    /* NOTE(review): this deprecated path stores into ts->Jacp while TSSetRHSJacobianP() stores into ts->Jacprhs -- confirm the difference is intentional */
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()`

  Level: deprecated

.seealso: `TSComputeRHSJacobianP()`
@*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidPointer(Amat, 4);

  PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 1142 1143 Level: deprecated 1144 1145 @*/ 1146 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 1147 { 1148 PetscFunctionBegin; 1149 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1150 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1151 1152 PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 1153 PetscFunctionReturn(PETSC_SUCCESS); 1154 } 1155 1156 /*@ 1157 TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 1158 1159 Level: deprecated 1160 1161 @*/ 1162 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 1163 { 1164 PetscFunctionBegin; 1165 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1166 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1167 1168 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 1169 PetscFunctionReturn(PETSC_SUCCESS); 1170 } 1171 1172 /*@C 1173 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1174 1175 Level: intermediate 1176 1177 .seealso: [](ch_ts), `TSAdjointMonitorSet()` 1178 @*/ 1179 PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1180 { 1181 PetscViewer viewer = vf->viewer; 1182 1183 PetscFunctionBegin; 1184 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1185 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1186 PetscCall(VecView(lambda[0], viewer)); 1187 PetscCall(PetscViewerPopFormat(viewer)); 1188 PetscFunctionReturn(PETSC_SUCCESS); 1189 } 1190 1191 /*@C 1192 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1193 1194 Collective 1195 1196 Input Parameters: 1197 + ts - `TS` object you wish to monitor 1198 . 
name - the monitor type one is seeking
. help - message indicating what monitoring is done
. manual - manual page for the monitor
. monitor - the monitor function
- monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects

  Level: developer

.seealso: [](ch_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
          `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
          `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`,
          `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
          `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
          `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
          `PetscOptionsFList()`, `PetscOptionsEList()`
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
    /* vf now holds its own reference to the viewer, so release the reference obtained from PetscOptionsGetViewer() */
    PetscCall(PetscObjectDereference((PetscObject)viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
    /* vf ownership passes to the monitor list; it is freed via PetscViewerAndFormatDestroy when monitors are cancelled */
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
  timestep to display the iteration's progress.

  Logically Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
. adjointmonitor - monitoring routine
. adjointmctx - [optional] user-defined context for private data for the
                 monitor routine (use `NULL` if no context is desired)
- adjointmdestroy - [optional] routine that frees monitor context
                    (may be `NULL`)

  Calling sequence of `adjointmonitor`:
$ PetscErrorCode adjointmonitor(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx)
+ ts - the `TS` context
. steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
          been interpolated to)
. time - current time
. u - current iterate
. numcost - number of cost functions
. lambda - sensitivities to initial conditions
. mu - sensitivities to parameters
- adjointmctx - [optional] adjoint monitoring context

  Level: intermediate

  Note:
  This routine adds an additional monitor to the list of monitors that
  already has been loaded.
1263 1264 Fortran Notes: 1265 Only a single monitor function can be set for each `TS` object 1266 1267 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()` 1268 @*/ 1269 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **)) 1270 { 1271 PetscInt i; 1272 PetscBool identical; 1273 1274 PetscFunctionBegin; 1275 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1276 for (i = 0; i < ts->numbermonitors; i++) { 1277 PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical)); 1278 if (identical) PetscFunctionReturn(PETSC_SUCCESS); 1279 } 1280 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set"); 1281 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1282 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1283 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx; 1284 PetscFunctionReturn(PETSC_SUCCESS); 1285 } 1286 1287 /*@C 1288 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1289 1290 Logically Collective 1291 1292 Input Parameter: 1293 . ts - the `TS` context obtained from `TSCreate()` 1294 1295 Notes: 1296 There is no way to remove a single, specific monitor. 

  Level: intermediate

.seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
@*/
PetscErrorCode TSAdjointMonitorCancel(TS ts)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* give each monitor a chance to free its private context before the list is emptied */
  for (i = 0; i < ts->numberadjointmonitors; i++) {
    if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
  }
  ts->numberadjointmonitors = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointMonitorDefault - the default monitor of adjoint computations

  Level: intermediate

  Note:
  Prints the step number, current step size and current time; a trailing "(r)" marks a step that was rolled back.

.seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()`
@*/
PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf)
{
  PetscViewer viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8);
  PetscCall(PetscViewerPushFormat(viewer, vf->format));
  PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)ptime, ts->steprollback ? " (r)\n" : "\n"));
  PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling
  `VecView()` for the sensitivities to initial states at each timestep

  Collective

  Input Parameters:
+ ts - the `TS` context
. step - current time-step
. ptime - current time
. u - current state
. numcost - number of cost functions
. lambda - sensitivities to initial conditions
. 
mu - sensitivities to parameters 1350 - dummy - either a viewer or `NULL` 1351 1352 Level: intermediate 1353 1354 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1355 @*/ 1356 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy) 1357 { 1358 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1359 PetscDraw draw; 1360 PetscReal xl, yl, xr, yr, h; 1361 char time[32]; 1362 1363 PetscFunctionBegin; 1364 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS); 1365 1366 PetscCall(VecView(lambda[0], ictx->viewer)); 1367 PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw)); 1368 PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime)); 1369 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 1370 h = yl + .95 * (yr - yl); 1371 PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time)); 1372 PetscCall(PetscDrawFlush(draw)); 1373 PetscFunctionReturn(PETSC_SUCCESS); 1374 } 1375 1376 /* 1377 TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from user options. 1378 1379 Collective 1380 1381 Input Parameter: 1382 . ts - the `TS` context 1383 1384 Options Database Keys: 1385 + -ts_adjoint_solve <yes,no> - After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory) 1386 . 
-ts_adjoint_monitor - print information at each adjoint time step 1387 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1388 1389 Level: developer 1390 1391 Note: 1392 This is not normally called directly by users 1393 1394 .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1395 */ 1396 PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject) 1397 { 1398 PetscBool tflg, opt; 1399 1400 PetscFunctionBegin; 1401 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1402 PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options"); 1403 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1404 PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt)); 1405 if (opt) { 1406 PetscCall(TSSetSaveTrajectory(ts)); 1407 ts->adjoint_solve = tflg; 1408 } 1409 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL)); 1410 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL)); 1411 opt = PETSC_FALSE; 1412 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt)); 1413 if (opt) { 1414 TSMonitorDrawCtx ctx; 1415 PetscInt howoften = 1; 1416 1417 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL)); 1418 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx)); 1419 PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void 
**))TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSAdjointStep - Steps one time step backward in the adjoint run

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Level: intermediate

.seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()`
@*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscCall(TSGetDM(ts, &dm));
  PetscCall(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason     = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0));
  PetscUseTypeMethod(ts, adjointstep);
  PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0));
  ts->adjoint_steps++; /* count this backward step toward adjoint_max_steps (checked below) */

  if (ts->reason < 0) {
    PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Options Database Key:
. 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1472 1473 Level: intermediate 1474 1475 Notes: 1476 This must be called after a call to `TSSolve()` that solves the forward problem 1477 1478 By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time 1479 1480 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1481 @*/ 1482 PetscErrorCode TSAdjointSolve(TS ts) 1483 { 1484 static PetscBool cite = PETSC_FALSE; 1485 #if defined(TSADJOINT_STAGE) 1486 PetscLogStage adjoint_stage; 1487 #endif 1488 1489 PetscFunctionBegin; 1490 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1491 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1492 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1493 " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n" 1494 " journal = {SIAM Journal on Scientific Computing},\n" 1495 " volume = {44},\n" 1496 " number = {1},\n" 1497 " pages = {C1-C24},\n" 1498 " doi = {10.1137/21M140078X},\n" 1499 " year = {2022}\n}\n", 1500 &cite)); 1501 #if defined(TSADJOINT_STAGE) 1502 PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage)); 1503 PetscCall(PetscLogStagePush(adjoint_stage)); 1504 #endif 1505 PetscCall(TSAdjointSetUp(ts)); 1506 1507 /* reset time step and iteration counters */ 1508 ts->adjoint_steps = 0; 1509 ts->ksp_its = 0; 1510 ts->snes_its = 0; 1511 ts->num_snes_failures = 0; 1512 ts->reject = 0; 1513 ts->reason = TS_CONVERGED_ITERATING; 1514 1515 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1516 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1517 1518 while (!ts->reason) { 1519 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1520 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, 
ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1521 PetscCall(TSAdjointEventHandler(ts)); 1522 PetscCall(TSAdjointStep(ts)); 1523 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts)); 1524 } 1525 if (!ts->steps) { 1526 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1527 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1528 } 1529 ts->solvetime = ts->ptime; 1530 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view")); 1531 PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution")); 1532 ts->adjoint_max_steps = 0; 1533 #if defined(TSADJOINT_STAGE) 1534 PetscCall(PetscLogStagePop()); 1535 #endif 1536 PetscFunctionReturn(PETSC_SUCCESS); 1537 } 1538 1539 /*@C 1540 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()` 1541 1542 Collective 1543 1544 Input Parameters: 1545 + ts - time stepping context obtained from `TSCreate()` 1546 . step - step number that has just completed 1547 . ptime - model time of the state 1548 . u - state at the current model time 1549 . numcost - number of cost functions (dimension of lambda or mu) 1550 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1551 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1552 1553 Level: developer 1554 1555 Note: 1556 `TSAdjointMonitor()` is typically used automatically within the time stepping implementations. 1557 Users would almost never call this routine directly. 
1558 1559 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()` 1560 @*/ 1561 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu) 1562 { 1563 PetscInt i, n = ts->numberadjointmonitors; 1564 1565 PetscFunctionBegin; 1566 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1567 PetscValidHeaderSpecific(u, VEC_CLASSID, 4); 1568 PetscCall(VecLockReadPush(u)); 1569 for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i])); 1570 PetscCall(VecLockReadPop(u)); 1571 PetscFunctionReturn(PETSC_SUCCESS); 1572 } 1573 1574 /*@ 1575 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1576 1577 Collective 1578 1579 Input Parameter: 1580 . ts - time stepping context 1581 1582 Level: advanced 1583 1584 Notes: 1585 This function cannot be called until `TSAdjointStep()` has been completed. 1586 1587 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()` 1588 @*/ 1589 PetscErrorCode TSAdjointCostIntegral(TS ts) 1590 { 1591 PetscFunctionBegin; 1592 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1593 PetscUseTypeMethod(ts, adjointintegral); 1594 PetscFunctionReturn(PETSC_SUCCESS); 1595 } 1596 1597 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1598 1599 /*@ 1600 TSForwardSetUp - Sets up the internal data structures for the later use 1601 of forward sensitivity analysis 1602 1603 Collective 1604 1605 Input Parameter: 1606 . 
ts - the `TS` context obtained from `TSCreate()` 1607 1608 Level: advanced 1609 1610 .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()` 1611 @*/ 1612 PetscErrorCode TSForwardSetUp(TS ts) 1613 { 1614 PetscFunctionBegin; 1615 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1616 if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1617 PetscTryTypeMethod(ts, forwardsetup); 1618 PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col)); 1619 ts->forwardsetupcalled = PETSC_TRUE; 1620 PetscFunctionReturn(PETSC_SUCCESS); 1621 } 1622 1623 /*@ 1624 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1625 1626 Collective 1627 1628 Input Parameter: 1629 . ts - the `TS` context obtained from `TSCreate()` 1630 1631 Level: advanced 1632 1633 .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()` 1634 @*/ 1635 PetscErrorCode TSForwardReset(TS ts) 1636 { 1637 TS quadts = ts->quadraturets; 1638 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1641 PetscTryTypeMethod(ts, forwardreset); 1642 PetscCall(MatDestroy(&ts->mat_sensip)); 1643 if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip)); 1644 PetscCall(VecDestroy(&ts->vec_sensip_col)); 1645 ts->forward_solve = PETSC_FALSE; 1646 ts->forwardsetupcalled = PETSC_FALSE; 1647 PetscFunctionReturn(PETSC_SUCCESS); 1648 } 1649 1650 /*@ 1651 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1652 1653 Input Parameters: 1654 + ts - the `TS` context obtained from `TSCreate()` 1655 . numfwdint - number of integrals 1656 - vp - the vectors containing the gradients for each integral w.r.t. 
parameters 1657 1658 Level: deprecated 1659 1660 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1661 @*/ 1662 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp) 1663 { 1664 PetscFunctionBegin; 1665 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1666 PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1667 if (!ts->numcost) ts->numcost = numfwdint; 1668 1669 ts->vecs_integral_sensip = vp; 1670 PetscFunctionReturn(PETSC_SUCCESS); 1671 } 1672 1673 /*@ 1674 TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term. 1675 1676 Input Parameter: 1677 . ts - the `TS` context obtained from `TSCreate()` 1678 1679 Output Parameter: 1680 . vp - the vectors containing the gradients for each integral w.r.t. parameters 1681 1682 Level: deprecated 1683 1684 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1685 @*/ 1686 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp) 1687 { 1688 PetscFunctionBegin; 1689 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1690 PetscValidPointer(vp, 3); 1691 if (numfwdint) *numfwdint = ts->numcost; 1692 if (vp) *vp = ts->vecs_integral_sensip; 1693 PetscFunctionReturn(PETSC_SUCCESS); 1694 } 1695 1696 /*@ 1697 TSForwardStep - Compute the forward sensitivity for one time step. 1698 1699 Collective 1700 1701 Input Parameter: 1702 . ts - time stepping context 1703 1704 Level: advanced 1705 1706 Notes: 1707 This function cannot be called until `TSStep()` has been completed. 
1708 1709 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1710 @*/ 1711 PetscErrorCode TSForwardStep(TS ts) 1712 { 1713 PetscFunctionBegin; 1714 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1715 PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0)); 1716 PetscUseTypeMethod(ts, forwardstep); 1717 PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0)); 1718 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]); 1719 PetscFunctionReturn(PETSC_SUCCESS); 1720 } 1721 1722 /*@ 1723 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1724 1725 Logically Collective 1726 1727 Input Parameters: 1728 + ts - the `TS` context obtained from `TSCreate()` 1729 . nump - number of parameters 1730 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1731 1732 Level: beginner 1733 1734 Notes: 1735 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1736 This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically. 1737 You must call this function before `TSSolve()`. 1738 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 
1739 1740 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1741 @*/ 1742 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat) 1743 { 1744 PetscFunctionBegin; 1745 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1746 PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3); 1747 ts->forward_solve = PETSC_TRUE; 1748 if (nump == PETSC_DEFAULT) { 1749 PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters)); 1750 } else ts->num_parameters = nump; 1751 PetscCall(PetscObjectReference((PetscObject)Smat)); 1752 PetscCall(MatDestroy(&ts->mat_sensip)); 1753 ts->mat_sensip = Smat; 1754 PetscFunctionReturn(PETSC_SUCCESS); 1755 } 1756 1757 /*@ 1758 TSForwardGetSensitivities - Returns the trajectory sensitivities 1759 1760 Not Collective, but Smat returned is parallel if ts is parallel 1761 1762 Output Parameters: 1763 + ts - the `TS` context obtained from `TSCreate()` 1764 . nump - number of parameters 1765 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1766 1767 Level: intermediate 1768 1769 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1770 @*/ 1771 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat) 1772 { 1773 PetscFunctionBegin; 1774 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1775 if (nump) *nump = ts->num_parameters; 1776 if (Smat) *Smat = ts->mat_sensip; 1777 PetscFunctionReturn(PETSC_SUCCESS); 1778 } 1779 1780 /*@ 1781 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1782 1783 Collective 1784 1785 Input Parameter: 1786 . ts - time stepping context 1787 1788 Level: advanced 1789 1790 Note: 1791 This function cannot be called until `TSStep()` has been completed. 
1792 1793 .seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()` 1794 @*/ 1795 PetscErrorCode TSForwardCostIntegral(TS ts) 1796 { 1797 PetscFunctionBegin; 1798 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1799 PetscUseTypeMethod(ts, forwardintegral); 1800 PetscFunctionReturn(PETSC_SUCCESS); 1801 } 1802 1803 /*@ 1804 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1805 1806 Collective 1807 1808 Input Parameters: 1809 + ts - the `TS` context obtained from `TSCreate()` 1810 - didp - parametric sensitivities of the initial condition 1811 1812 Level: intermediate 1813 1814 Notes: 1815 `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way. 1816 This function is used to set initial values for tangent linear variables. 1817 1818 .seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()` 1819 @*/ 1820 PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp) 1821 { 1822 PetscFunctionBegin; 1823 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1824 PetscValidHeaderSpecific(didp, MAT_CLASSID, 2); 1825 if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp)); 1826 PetscFunctionReturn(PETSC_SUCCESS); 1827 } 1828 1829 /*@ 1830 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1831 1832 Input Parameter: 1833 . 
ts - the `TS` context obtained from `TSCreate()` 1834 1835 Output Parameters: 1836 + ns - number of stages 1837 - S - tangent linear sensitivities at the intermediate stages 1838 1839 Level: advanced 1840 1841 .seealso: `TS` 1842 @*/ 1843 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S) 1844 { 1845 PetscFunctionBegin; 1846 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1847 1848 if (!ts->ops->getstages) *S = NULL; 1849 else PetscUseTypeMethod(ts, forwardgetstages, ns, S); 1850 PetscFunctionReturn(PETSC_SUCCESS); 1851 } 1852 1853 /*@ 1854 TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time 1855 1856 Input Parameters: 1857 + ts - the `TS` context obtained from `TSCreate()` 1858 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1859 1860 Output Parameter: 1861 . quadts - the child `TS` context 1862 1863 Level: intermediate 1864 1865 .seealso: [](ch_ts), `TSGetQuadratureTS()` 1866 @*/ 1867 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts) 1868 { 1869 char prefix[128]; 1870 1871 PetscFunctionBegin; 1872 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1873 PetscValidPointer(quadts, 3); 1874 PetscCall(TSDestroy(&ts->quadraturets)); 1875 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets)); 1876 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1)); 1877 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : ""));
  PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix));
  *quadts = ts->quadraturets;

  /* the quadrature solution holds one entry per cost functional; fall back to a
     length-1 vector when the number of cost functions is not yet known */
  if (ts->numcost) {
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol));
  } else {
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol));
  }
  ts->costintegralfwd = fwd;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
- quadts - the child `TS` context

  Level: intermediate

.seealso: [](ch_ts), `TSCreateQuadratureTS()`
@*/
PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* both outputs are optional */
  if (fwd) *fwd = ts->costintegralfwd;
  if (quadts) *quadts = ts->quadraturets;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS`

  Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
- x - state vector

  Output Parameters:
+ J - Jacobian matrix
- Jpre - preconditioning matrix for J (may be same as J)

  Level: developer

  Note:
  Uses finite differencing when `TS` Jacobian is not available.
1930 1931 .seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()` 1932 @*/ 1933 PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre) 1934 { 1935 SNES snes = ts->snes; 1936 PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL; 1937 1938 PetscFunctionBegin; 1939 /* 1940 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 1941 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 1942 explicit methods. Instead, we check the Jacobian compute function directly to determine if FD 1943 coloring is used. 1944 */ 1945 PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL)); 1946 if (jac == SNESComputeJacobianDefaultColor) { 1947 Vec f; 1948 PetscCall(SNESSetSolution(snes, x)); 1949 PetscCall(SNESGetFunction(snes, &f, NULL, NULL)); 1950 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 1951 PetscCall(SNESComputeFunction(snes, x, f)); 1952 } 1953 PetscCall(SNESComputeJacobian(snes, x, J, Jpre)); 1954 PetscFunctionReturn(PETSC_SUCCESS); 1955 } 1956