#include <petsc/private/tsimpl.h> /*I "petscts.h" I*/
#include <petscdraw.h>

PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval;

/* #define TSADJOINT_STAGE */

/* ------------------------ Sensitivity Context ---------------------------*/

/*@C
  TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

  Logically Collective

  Input Parameters:
+ ts   - `TS` context obtained from `TSCreate()`
. Amat - JacobianP matrix
. func - function
- ctx  - [optional] user-defined function context

  Calling sequence of `func`:
$ PetscErrorCode func(TS ts, PetscReal t, Vec U, Mat A, void *ctx)
+ t   - current timestep
. U   - input vector (current ODE solution)
. A   - output matrix
- ctx - [optional] user-defined function context

  Level: intermediate

  Note:
  Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p

.seealso: [](chapter_ts), `TS`, `TSGetRHSJacobianP()`
@*/
PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  /* record the callback and its context */
  ts->rhsjacobianp    = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* reference Amat before destroying the stored matrix so Amat == ts->Jacprhs is safe */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacprhs));
    ts->Jacprhs = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix.

  Logically Collective

  Input Parameter:
. ts - `TS` context obtained from `TSCreate()`

  Output Parameters:
+ Amat - JacobianP matrix
func - function 62 - ctx - [optional] user-defined function context 63 64 Calling sequence of `func`: 65 $ PetscErrorCode func(TS ts, PetscReal t, Vec y, Mat A, void *ctx) 66 + t - current timestep 67 . U - input vector (current ODE solution) 68 . A - output matrix 69 - ctx - [optional] user-defined function context 70 71 Level: intermediate 72 73 Note: 74 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 75 76 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`, `TSGetRHSJacobianP()` 77 @*/ 78 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, PetscErrorCode (**func)(TS, PetscReal, Vec, Mat, void *), void **ctx) 79 { 80 PetscFunctionBegin; 81 if (func) *func = ts->rhsjacobianp; 82 if (ctx) *ctx = ts->rhsjacobianpctx; 83 if (Amat) *Amat = ts->Jacprhs; 84 PetscFunctionReturn(PETSC_SUCCESS); 85 } 86 87 /*@C 88 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 89 90 Collective 91 92 Input Parameters: 93 . ts - The `TS` context obtained from `TSCreate()` 94 95 Level: developer 96 97 .seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS` 98 @*/ 99 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat) 100 { 101 PetscFunctionBegin; 102 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 103 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 104 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 105 106 if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 107 else { 108 PetscBool assembled; 109 PetscCall(MatZeroEntries(Amat)); 110 PetscCall(MatAssembled(Amat, &assembled)); 111 if (!assembled) { 112 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 113 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 114 } 115 } 116 PetscFunctionReturn(PETSC_SUCCESS); 117 } 118 119 /*@C 120 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. 
  the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix.

  Logically Collective

  Input Parameters:
+ ts   - `TS` context obtained from `TSCreate()`
. Amat - JacobianP matrix
. func - function
- ctx  - [optional] user-defined function context

  Calling sequence of `func`:
$ PetscErrorCode func(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx)
+ t     - current timestep
. U     - input vector (current ODE solution)
. Udot  - time derivative of state vector
. shift - shift to apply (see `TSComputeIJacobian()` for the meaning of the shift)
. A     - output matrix
- ctx   - [optional] user-defined function context

  Level: intermediate

  Note:
  Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p

.seealso: [](chapter_ts), `TSSetRHSJacobianP()`, `TS`
@*/
PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Vec, PetscReal, Mat, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);

  /* record the callback and its context */
  ts->ijacobianp    = func;
  ts->ijacobianpctx = ctx;
  if (Amat) {
    /* reference Amat before destroying the stored matrix so Amat == ts->Jacp is safe */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIJacobianP - Runs the user-defined IJacobianP function.

  Collective

  Input Parameters:
+ ts    - the `TS` context
. t     - current timestep
. U     - state vector
. Udot  - time derivative of state vector
. shift - shift to apply (see `TSComputeIJacobian()` for the meaning of the shift)
- imex  - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate

  Output Parameters:
. Amat - Jacobian matrix

  Level: developer

.seealso: [](chapter_ts), `TS`, `TSSetIJacobianP()`
@*/
PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex)
{
  PetscFunctionBegin;
  if (!Amat) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0));
  if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx));
  if (imex) {
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat, &assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* fully implicit path: fold the RHS contribution into Amat with a negative sign */
    if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs));
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat, -1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

  Logically Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
. numcost      - number of gradients to be computed, this is the number of cost functions
. costintegral - vector that stores the integral values
. rf           - routine for evaluating the integrand function
. drduf        - function that computes the gradients of the r's with respect to u
. drdpf        - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL)
. fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
- ctx          - [optional] user-defined context for private data for the function evaluation routine (may be NULL)

  Calling sequence of `rf`:
$ PetscErrorCode rf(TS ts, PetscReal t, Vec U, Vec F, void *ctx)

  Calling sequence of `drduf`:
$ PetscErrorCode drduf(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx)

  Calling sequence of `drdpf`:
$ PetscErrorCode drdpf(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx)

  Level: deprecated

  Note:
  For optimization there is usually a single cost function (numcost = 1).
  For sensitivities there may be multiple cost functions

.seealso: [](chapter_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  /* numcost must agree with any value previously fixed by the gradient-setting routines */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* reference before destroy so re-passing the stored vector is safe */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSGetCostIntegral - Returns the values of the integral term in the cost functions.
  It is valid to call the routine after a backward run.

  Not Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameter:
. v - the vector containing the integrals for each cost function

  Level: intermediate

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
@*/
PetscErrorCode TSGetCostIntegral(TS ts, Vec *v)
{
  TS quadts;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(v, 2);
  /* NOTE(review): quadts is dereferenced unchecked — assumes the quadrature TS exists when this is called; confirm TSGetQuadratureTS() cannot return a NULL quadts here */
  PetscCall(TSGetQuadratureTS(ts, NULL, &quadts));
  *v = quadts->vec_sol;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

  Input Parameters:
+ ts - the `TS` context
. t  - current time
- U  - state vector, i.e. current solution

  Output Parameter:
. Q - vector of size numcost to hold the outputs

  Level: deprecated

  Note:
  Most users should not need to explicitly call this routine, as it
  is used internally within the sensitivity analysis context.

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()`
@*/
PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(Q, VEC_CLASSID, 4);

  PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0));
  if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx));
  else PetscCall(VecZeroEntries(Q)); /* no integrand set: the integral contribution is zero */
  PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()`

  Level: deprecated

@*/
PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU)
{
  PetscFunctionBegin;
  if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* NOTE(review): ts->drdufunction is invoked unchecked — assumes callers guarantee it was set via TSSetCostIntegrand() */
  PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()`

  Level: deprecated

@*/
PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP)
{
  PetscFunctionBegin;
  if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* NOTE(review): ts->drdpfunction is invoked unchecked — assumes callers guarantee it was set via TSSetCostIntegrand() */
  PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product.
The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 378 379 Logically Collective 380 381 Input Parameters: 382 + ts - `TS` context obtained from `TSCreate()` 383 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 384 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 385 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 386 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 387 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 388 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 389 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 390 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP 391 392 Calling sequence of `ihessianproductfunc`: 393 $ PetscErrorCode ihessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx); 394 + t - current timestep 395 . U - input vector (current ODE solution) 396 . Vl - an array of input vectors to be left-multiplied with the Hessian 397 . Vr - input vector to be right-multiplied with the Hessian 398 . VHV - an array of output vectors for vector-Hessian-vector product 399 - ctx - [optional] user-defined function context 400 401 Level: intermediate 402 403 Notes: 404 The first Hessian function and the working array are required. 405 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 406 $ Vl_n^T*F_UP*Vr 407 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 408 Each entry of F_UP corresponds to the derivative 409 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 
  The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
$ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
  If the cost function is a scalar, there will be only one vector in Vl and VHV.

.seealso: [](chapter_ts), `TS`
@*/
PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(ihp1, 2); /* the F_UU working array is mandatory; the others are optional */

  ts->ihessianproductctx = ctx;
  if (ihp1) ts->vecs_fuu = ihp1;
  if (ihp2) ts->vecs_fup = ihp2;
  if (ihp3) ts->vecs_fpu = ihp3;
  if (ihp4) ts->vecs_fpp = ihp4;
  ts->ihessianproduct_fuu = ihessianproductfunc1;
  ts->ihessianproduct_fup = ihessianproductfunc2;
  ts->ihessianproduct_fpu = ihessianproductfunc3;
  ts->ihessianproduct_fpp = ihessianproductfunc4;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_guu) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
    /* the RHS contribution enters the implicit form with opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_gup) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
    /* the RHS contribution enters the implicit form with opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_gpu) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV));
    /* the RHS contribution enters the implicit form with opposite sign */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
549 550 .seealso: [](chapter_ts), `TSSetIHessianProduct()` 551 @*/ 552 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 553 { 554 PetscFunctionBegin; 555 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 556 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 557 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 558 559 if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 560 561 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 562 if (ts->rhshessianproduct_gpp) { 563 PetscInt nadj; 564 PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV)); 565 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 566 } 567 PetscFunctionReturn(PETSC_SUCCESS); 568 } 569 570 /*@C 571 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 572 573 Logically Collective 574 575 Input Parameters: 576 + ts - `TS` context obtained from `TSCreate()` 577 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 578 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 579 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 580 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 581 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 582 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 583 . 
. rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
- hessianproductfunc4 - vector-Hessian-vector product function for G_PP

  Calling sequence of `rhshessianproductfunc`:
$ PetscErrorCode rhshessianproductfunc(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx);
+ t   - current timestep
. U   - input vector (current ODE solution)
. Vl  - an array of input vectors to be left-multiplied with the Hessian
. Vr  - input vector to be right-multiplied with the Hessian
. VHV - an array of output vectors for vector-Hessian-vector product
- ctx - [optional] user-defined function context

  Level: intermediate

  Notes:
  The first Hessian function and the working array are required.
  As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
$ Vl_n^T*G_UP*Vr
  where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
  Each entry of G_UP corresponds to the derivative
$ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
  The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
$ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
  If the cost function is a scalar, there will be only one vector in Vl and VHV.
.seealso: [](chapter_ts), `TS`
@*/
PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidPointer(rhshp1, 2); /* the G_UU working array is mandatory; the others are optional */

  ts->rhshessianproductctx = ctx;
  if (rhshp1) ts->vecs_guu = rhshp1;
  if (rhshp2) ts->vecs_gup = rhshp2;
  if (rhshp3) ts->vecs_gpu = rhshp3;
  if (rhshp4) ts->vecs_gpp = rhshp4;
  ts->rhshessianproduct_guu = rhshessianproductfunc1;
  ts->rhshessianproduct_gup = rhshessianproductfunc2;
  ts->rhshessianproduct_gpu = rhshessianproductfunc3;
  ts->rhshessianproduct_gpp = rhshessianproductfunc4;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* NOTE(review): ts->rhshessianproduct_guu is invoked unchecked — assumes callers guard on it being set (as TSComputeIHessianProductFunctionUU does) */
  PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](chapter_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

  Collective

  Input Parameters:
. ts - The `TS` context obtained from `TSCreate()`

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
695 696 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()` 697 @*/ 698 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 699 { 700 PetscFunctionBegin; 701 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 702 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 703 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 704 705 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 706 PetscFunctionReturn(PETSC_SUCCESS); 707 } 708 709 /*@C 710 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 711 712 Collective 713 714 Input Parameters: 715 . ts - The `TS` context obtained from `TSCreate()` 716 717 Level: developer 718 719 Note: 720 `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation, 721 so most users would not generally call this routine themselves. 722 723 .seealso: [](chapter_ts), `TSSetRHSHessianProduct()` 724 @*/ 725 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 726 { 727 PetscFunctionBegin; 728 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 729 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 730 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 731 732 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 733 PetscFunctionReturn(PETSC_SUCCESS); 734 } 735 736 /* --------------------------- Adjoint sensitivity ---------------------------*/ 737 738 /*@ 739 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 740 for use by the `TS` adjoint routines. 741 742 Logically Collective 743 744 Input Parameters: 745 + ts - the `TS` context obtained from `TSCreate()` 746 . 
numcost - number of gradients to be computed, this is the number of cost functions 747 . lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 748 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 749 750 Level: beginner 751 752 Notes: 753 the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime mu_i = df/dp|finaltime 754 755 After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities 756 757 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()` 758 @*/ 759 PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu) 760 { 761 PetscFunctionBegin; 762 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 763 PetscValidPointer(lambda, 3); 764 ts->vecs_sensi = lambda; 765 ts->vecs_sensip = mu; 766 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 767 ts->numcost = numcost; 768 PetscFunctionReturn(PETSC_SUCCESS); 769 } 770 771 /*@ 772 TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()` 773 774 Not Collective, but the vectors returned are parallel if `TS` is parallel 775 776 Input Parameter: 777 . ts - the `TS` context obtained from `TSCreate()` 778 779 Output Parameters: 780 + numcost - size of returned arrays 781 . 
. lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
- mu     - vectors containing the gradients of the cost functions with respect to the problem parameters

  Level: intermediate

.seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* all outputs are optional; pass NULL for any that are not wanted */
  if (numcost) *numcost = ts->numcost;
  if (lambda) *lambda = ts->vecs_sensi;
  if (mu) *mu = ts->vecs_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
  for use by the `TS` adjoint routines.

  Logically Collective

  Input Parameters:
+ ts      - the `TS` context obtained from `TSCreate()`
. numcost - number of cost functions
. lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
. mu2     - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
- dir     - the direction vector that are multiplied with the Hessian of the cost functions

  Level: beginner

  Notes:
  Hessian of the cost function is completely different from Hessian of the ODE/DAE system

  For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`.

  After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.
819 820 Passing NULL for lambda2 disables the second-order calculation. 821 822 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()` 823 @*/ 824 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir) 825 { 826 PetscFunctionBegin; 827 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 828 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 829 ts->numcost = numcost; 830 ts->vecs_sensi2 = lambda2; 831 ts->vecs_sensi2p = mu2; 832 ts->vec_dir = dir; 833 PetscFunctionReturn(PETSC_SUCCESS); 834 } 835 836 /*@ 837 TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()` 838 839 Not Collective, but vectors returned are parallel if `TS` is parallel 840 841 Input Parameter: 842 . ts - the `TS` context obtained from `TSCreate()` 843 844 Output Parameters: 845 + numcost - number of cost functions 846 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 847 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 848 - dir - the direction vector that are multiplied with the Hessian of the cost functions 849 850 Level: intermediate 851 852 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()` 853 @*/ 854 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir) 855 { 856 PetscFunctionBegin; 857 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 858 if (numcost) *numcost = ts->numcost; 859 if (lambda2) *lambda2 = ts->vecs_sensi2; 860 if (mu2) *mu2 = ts->vecs_sensi2p; 861 if (dir) *dir = ts->vec_dir; 862 PetscFunctionReturn(PETSC_SUCCESS); 863 } 864 865 /*@ 866 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 867 868 Logically Collective 869 870 Input Parameters: 871 + ts - the `TS` context obtained from `TSCreate()` 872 - didp - the derivative of initial values w.r.t. parameters 873 874 Level: intermediate 875 876 Notes: 877 When computing sensitivies w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. 878 `TS` adjoint does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver. 879 880 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 881 @*/ 882 PetscErrorCode TSAdjointSetForward(TS ts, Mat didp) 883 { 884 Mat A; 885 Vec sp; 886 PetscScalar *xarr; 887 PetscInt lsize; 888 889 PetscFunctionBegin; 890 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 891 PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first"); 892 PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 893 /* create a single-column dense matrix */ 894 PetscCall(VecGetLocalSize(ts->vec_sol, &lsize)); 895 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A)); 896 897 PetscCall(VecDuplicate(ts->vec_sol, &sp)); 898 PetscCall(MatDenseGetColumn(A, 0, &xarr)); 899 PetscCall(VecPlaceArray(sp, xarr)); 900 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 901 if (didp) { 902 PetscCall(MatMult(didp, ts->vec_dir, sp)); 903 PetscCall(VecScale(sp, 2.)); 904 } else { 905 PetscCall(VecZeroEntries(sp)); 906 } 907 } else { /* tangent linear variable initialized as dir */ 908 PetscCall(VecCopy(ts->vec_dir, sp)); 909 } 910 PetscCall(VecResetArray(sp)); 911 PetscCall(MatDenseRestoreColumn(A, &xarr)); 912 PetscCall(VecDestroy(&sp)); 913 914 PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */ 915 916 PetscCall(MatDestroy(&A)); 917 PetscFunctionReturn(PETSC_SUCCESS); 918 } 919 920 /*@ 921 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 922 923 Logically Collective 924 925 Input Parameters: 926 . ts - the `TS` context obtained from `TSCreate()` 927 928 Level: intermediate 929 930 .seealso: [](chapter_ts), `TSAdjointSetForward()` 931 @*/ 932 PetscErrorCode TSAdjointResetForward(TS ts) 933 { 934 PetscFunctionBegin; 935 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 936 PetscCall(TSForwardReset(ts)); 937 PetscFunctionReturn(PETSC_SUCCESS); 938 } 939 940 /*@ 941 TSAdjointSetUp - Sets up the internal data structures for the later use 942 of an adjoint solver 943 944 Collective 945 946 Input Parameter: 947 . 
ts - the `TS` context obtained from `TSCreate()` 948 949 Level: advanced 950 951 .seealso: [](chapter_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 952 @*/ 953 PetscErrorCode TSAdjointSetUp(TS ts) 954 { 955 TSTrajectory tj; 956 PetscBool match; 957 958 PetscFunctionBegin; 959 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 960 if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 961 PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first"); 962 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 963 PetscCall(TSGetTrajectory(ts, &tj)); 964 PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match)); 965 if (match) { 966 PetscBool solution_only; 967 PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only)); 968 PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 969 } 970 PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */ 971 972 if (ts->quadraturets) { /* if there is integral in the cost function */ 973 PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col)); 974 if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col)); 975 } 976 977 PetscTryTypeMethod(ts, adjointsetup); 978 ts->adjointsetupcalled = PETSC_TRUE; 979 PetscFunctionReturn(PETSC_SUCCESS); 980 } 981 982 /*@ 983 TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s. 984 985 Collective 986 987 Input Parameter: 988 . 
ts - the `TS` context obtained from `TSCreate()` 989 990 Level: beginner 991 992 .seealso: [](chapter_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 993 @*/ 994 PetscErrorCode TSAdjointReset(TS ts) 995 { 996 PetscFunctionBegin; 997 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 998 PetscTryTypeMethod(ts, adjointreset); 999 if (ts->quadraturets) { /* if there is integral in the cost function */ 1000 PetscCall(VecDestroy(&ts->vec_drdu_col)); 1001 if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col)); 1002 } 1003 ts->vecs_sensi = NULL; 1004 ts->vecs_sensip = NULL; 1005 ts->vecs_sensi2 = NULL; 1006 ts->vecs_sensi2p = NULL; 1007 ts->vec_dir = NULL; 1008 ts->adjointsetupcalled = PETSC_FALSE; 1009 PetscFunctionReturn(PETSC_SUCCESS); 1010 } 1011 1012 /*@ 1013 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1014 1015 Logically Collective 1016 1017 Input Parameters: 1018 + ts - the `TS` context obtained from `TSCreate()` 1019 - steps - number of steps to use 1020 1021 Level: intermediate 1022 1023 Notes: 1024 Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. 
One can call this 1025 so as to integrate back to less than the original timestep 1026 1027 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()` 1028 @*/ 1029 PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps) 1030 { 1031 PetscFunctionBegin; 1032 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1033 PetscValidLogicalCollectiveInt(ts, steps, 2); 1034 PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps"); 1035 PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps"); 1036 ts->adjoint_max_steps = steps; 1037 PetscFunctionReturn(PETSC_SUCCESS); 1038 } 1039 1040 /*@C 1041 TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()` 1042 1043 Level: deprecated 1044 1045 @*/ 1046 PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx) 1047 { 1048 PetscFunctionBegin; 1049 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1050 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1051 1052 ts->rhsjacobianp = func; 1053 ts->rhsjacobianpctx = ctx; 1054 if (Amat) { 1055 PetscCall(PetscObjectReference((PetscObject)Amat)); 1056 PetscCall(MatDestroy(&ts->Jacp)); 1057 ts->Jacp = Amat; 1058 } 1059 PetscFunctionReturn(PETSC_SUCCESS); 1060 } 1061 1062 /*@C 1063 TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()` 1064 1065 Level: deprecated 1066 1067 @*/ 1068 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat) 1069 { 1070 PetscFunctionBegin; 1071 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1072 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1073 PetscValidPointer(Amat, 4); 1074 1075 PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 1076 PetscFunctionReturn(PETSC_SUCCESS); 1077 } 1078 1079 /*@ 1080 
TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 1081 1082 Level: deprecated 1083 1084 @*/ 1085 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 1086 { 1087 PetscFunctionBegin; 1088 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1089 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1090 1091 PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 1092 PetscFunctionReturn(PETSC_SUCCESS); 1093 } 1094 1095 /*@ 1096 TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 1097 1098 Level: deprecated 1099 1100 @*/ 1101 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 1102 { 1103 PetscFunctionBegin; 1104 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1105 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1106 1107 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 1108 PetscFunctionReturn(PETSC_SUCCESS); 1109 } 1110 1111 /*@C 1112 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1113 1114 Level: intermediate 1115 1116 .seealso: [](chapter_ts), `TSAdjointMonitorSet()` 1117 @*/ 1118 PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1119 { 1120 PetscViewer viewer = vf->viewer; 1121 1122 PetscFunctionBegin; 1123 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1124 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1125 PetscCall(VecView(lambda[0], viewer)); 1126 PetscCall(PetscViewerPopFormat(viewer)); 1127 PetscFunctionReturn(PETSC_SUCCESS); 1128 } 1129 1130 /*@C 1131 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1132 1133 Collective 1134 1135 Input Parameters: 1136 + ts - `TS` object you wish to monitor 1137 . 
name - the monitor type one is seeking
. help - message indicating what monitoring is done
. manual - manual page for the monitor
. monitor - the monitor function
- monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects

  Level: developer

.seealso: [](chapter_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
          `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
          `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`,
          `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
          `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
          `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
          `PetscOptionsFList()`, `PetscOptionsEList()`
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg));
  if (flg) {
    PetscViewerAndFormat *vf;

    /* the viewer/format pair owns the viewer reference from here on */
    PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf));
    PetscCall(PetscObjectDereference((PetscObject)viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts, vf));
    PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
  timestep to display the iteration's progress.

  Logically Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
. adjointmonitor - monitoring routine
. adjointmctx - [optional] user-defined context for private data for the
                monitor routine (use NULL if no context is desired)
- adjointmonitordestroy - [optional] routine that frees monitor context (may be NULL)

  Calling sequence of `adjointmonitor`:
$ PetscErrorCode adjointmonitor(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx)
+ ts - the `TS` context
. steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
          been interpolated to)
. time - current time
. u - current iterate
. numcost - number of cost functions
. lambda - sensitivities to initial conditions
. mu - sensitivities to parameters
- adjointmctx - [optional] adjoint monitoring context

  Level: intermediate

  Note:
  This routine adds an additional monitor to the list of monitors that
  already has been loaded.
1202 1203 Fortran Note: 1204 Only a single monitor function can be set for each `TS` object 1205 1206 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()` 1207 @*/ 1208 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **)) 1209 { 1210 PetscInt i; 1211 PetscBool identical; 1212 1213 PetscFunctionBegin; 1214 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1215 for (i = 0; i < ts->numbermonitors; i++) { 1216 PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical)); 1217 if (identical) PetscFunctionReturn(PETSC_SUCCESS); 1218 } 1219 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set"); 1220 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1221 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1222 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx; 1223 PetscFunctionReturn(PETSC_SUCCESS); 1224 } 1225 1226 /*@C 1227 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1228 1229 Logically Collective 1230 1231 Input Parameters: 1232 . ts - the `TS` context obtained from `TSCreate()` 1233 1234 Notes: 1235 There is no way to remove a single, specific monitor. 
1236 1237 Level: intermediate 1238 1239 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1240 @*/ 1241 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1242 { 1243 PetscInt i; 1244 1245 PetscFunctionBegin; 1246 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1247 for (i = 0; i < ts->numberadjointmonitors; i++) { 1248 if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); 1249 } 1250 ts->numberadjointmonitors = 0; 1251 PetscFunctionReturn(PETSC_SUCCESS); 1252 } 1253 1254 /*@C 1255 TSAdjointMonitorDefault - the default monitor of adjoint computations 1256 1257 Level: intermediate 1258 1259 .seealso: [](chapter_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1260 @*/ 1261 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1262 { 1263 PetscViewer viewer = vf->viewer; 1264 1265 PetscFunctionBegin; 1266 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1267 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1268 PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel)); 1269 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)ptime, ts->steprollback ? " (r)\n" : "\n")); 1270 PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel)); 1271 PetscCall(PetscViewerPopFormat(viewer)); 1272 PetscFunctionReturn(PETSC_SUCCESS); 1273 } 1274 1275 /*@C 1276 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling 1277 `VecView()` for the sensitivities to initial states at each timestep 1278 1279 Collective 1280 1281 Input Parameters: 1282 + ts - the `TS` context 1283 . step - current time-step 1284 . ptime - current time 1285 . u - current state 1286 . numcost - number of cost functions 1287 . lambda - sensitivities to initial conditions 1288 . 
mu - sensitivities to parameters 1289 - dummy - either a viewer or NULL 1290 1291 Level: intermediate 1292 1293 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1294 @*/ 1295 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy) 1296 { 1297 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1298 PetscDraw draw; 1299 PetscReal xl, yl, xr, yr, h; 1300 char time[32]; 1301 1302 PetscFunctionBegin; 1303 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS); 1304 1305 PetscCall(VecView(lambda[0], ictx->viewer)); 1306 PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw)); 1307 PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime)); 1308 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 1309 h = yl + .95 * (yr - yl); 1310 PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time)); 1311 PetscCall(PetscDrawFlush(draw)); 1312 PetscFunctionReturn(PETSC_SUCCESS); 1313 } 1314 1315 /* 1316 TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from user options. 1317 1318 Collective 1319 1320 Input Parameter: 1321 . ts - the `TS` context 1322 1323 Options Database Keys: 1324 + -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory) 1325 . 
-ts_adjoint_monitor - print information at each adjoint time step 1326 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1327 1328 Level: developer 1329 1330 Note: 1331 This is not normally called directly by users 1332 1333 .seealso: [](chapter_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1334 */ 1335 PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject) 1336 { 1337 PetscBool tflg, opt; 1338 1339 PetscFunctionBegin; 1340 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1341 PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options"); 1342 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1343 PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt)); 1344 if (opt) { 1345 PetscCall(TSSetSaveTrajectory(ts)); 1346 ts->adjoint_solve = tflg; 1347 } 1348 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL)); 1349 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL)); 1350 opt = PETSC_FALSE; 1351 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt)); 1352 if (opt) { 1353 TSMonitorDrawCtx ctx; 1354 PetscInt howoften = 1; 1355 1356 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", howoften, &howoften, NULL)); 1357 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx)); 1358 PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void 
**))TSMonitorDrawCtxDestroy)); 1359 } 1360 PetscFunctionReturn(PETSC_SUCCESS); 1361 } 1362 1363 /*@ 1364 TSAdjointStep - Steps one time step backward in the adjoint run 1365 1366 Collective 1367 1368 Input Parameter: 1369 . ts - the `TS` context obtained from `TSCreate()` 1370 1371 Level: intermediate 1372 1373 .seealso: [](chapter_ts), `TSAdjointSetUp()`, `TSAdjointSolve()` 1374 @*/ 1375 PetscErrorCode TSAdjointStep(TS ts) 1376 { 1377 DM dm; 1378 1379 PetscFunctionBegin; 1380 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1381 PetscCall(TSGetDM(ts, &dm)); 1382 PetscCall(TSAdjointSetUp(ts)); 1383 ts->steps--; /* must decrease the step index before the adjoint step is taken. */ 1384 1385 ts->reason = TS_CONVERGED_ITERATING; 1386 ts->ptime_prev = ts->ptime; 1387 PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0)); 1388 PetscUseTypeMethod(ts, adjointstep); 1389 PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0)); 1390 ts->adjoint_steps++; 1391 1392 if (ts->reason < 0) { 1393 PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]); 1394 } else if (!ts->reason) { 1395 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1396 } 1397 PetscFunctionReturn(PETSC_SUCCESS); 1398 } 1399 1400 /*@ 1401 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1402 1403 Collective 1404 ` 1405 Input Parameter: 1406 . ts - the `TS` context obtained from `TSCreate()` 1407 1408 Options Database Key: 1409 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1410 1411 Level: intermediate 1412 1413 Notes: 1414 This must be called after a call to `TSSolve()` that solves the forward problem 1415 1416 By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time 1417 1418 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1419 @*/ 1420 PetscErrorCode TSAdjointSolve(TS ts) 1421 { 1422 static PetscBool cite = PETSC_FALSE; 1423 #if defined(TSADJOINT_STAGE) 1424 PetscLogStage adjoint_stage; 1425 #endif 1426 1427 PetscFunctionBegin; 1428 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1429 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1430 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1431 " author = {Zhang, Hong and Constantinescu, Emil M. 
and Smith, Barry F.},\n" 1432 " journal = {SIAM Journal on Scientific Computing},\n" 1433 " volume = {44},\n" 1434 " number = {1},\n" 1435 " pages = {C1-C24},\n" 1436 " doi = {10.1137/21M140078X},\n" 1437 " year = {2022}\n}\n", 1438 &cite)); 1439 #if defined(TSADJOINT_STAGE) 1440 PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage)); 1441 PetscCall(PetscLogStagePush(adjoint_stage)); 1442 #endif 1443 PetscCall(TSAdjointSetUp(ts)); 1444 1445 /* reset time step and iteration counters */ 1446 ts->adjoint_steps = 0; 1447 ts->ksp_its = 0; 1448 ts->snes_its = 0; 1449 ts->num_snes_failures = 0; 1450 ts->reject = 0; 1451 ts->reason = TS_CONVERGED_ITERATING; 1452 1453 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1454 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1455 1456 while (!ts->reason) { 1457 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1458 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1459 PetscCall(TSAdjointEventHandler(ts)); 1460 PetscCall(TSAdjointStep(ts)); 1461 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts)); 1462 } 1463 if (!ts->steps) { 1464 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1465 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1466 } 1467 ts->solvetime = ts->ptime; 1468 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view")); 1469 PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution")); 1470 ts->adjoint_max_steps = 0; 1471 #if defined(TSADJOINT_STAGE) 1472 PetscCall(PetscLogStagePop()); 1473 #endif 1474 PetscFunctionReturn(PETSC_SUCCESS); 1475 } 1476 1477 /*@C 1478 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()` 1479 1480 
Collective 1481 1482 Input Parameters: 1483 + ts - time stepping context obtained from `TSCreate()` 1484 . step - step number that has just completed 1485 . ptime - model time of the state 1486 . u - state at the current model time 1487 . numcost - number of cost functions (dimension of lambda or mu) 1488 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1489 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1490 1491 Level: developer 1492 1493 Note: 1494 `TSAdjointMonitor()` is typically used automatically within the time stepping implementations. 1495 Users would almost never call this routine directly. 1496 1497 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()` 1498 @*/ 1499 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu) 1500 { 1501 PetscInt i, n = ts->numberadjointmonitors; 1502 1503 PetscFunctionBegin; 1504 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1505 PetscValidHeaderSpecific(u, VEC_CLASSID, 4); 1506 PetscCall(VecLockReadPush(u)); 1507 for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i])); 1508 PetscCall(VecLockReadPop(u)); 1509 PetscFunctionReturn(PETSC_SUCCESS); 1510 } 1511 1512 /*@ 1513 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1514 1515 Collective 1516 1517 Input Parameter: 1518 . ts - time stepping context 1519 1520 Level: advanced 1521 1522 Notes: 1523 This function cannot be called until `TSAdjointStep()` has been completed. 
1524 1525 .seealso: [](chapter_ts), `TSAdjointSolve()`, `TSAdjointStep()` 1526 @*/ 1527 PetscErrorCode TSAdjointCostIntegral(TS ts) 1528 { 1529 PetscFunctionBegin; 1530 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1531 PetscUseTypeMethod(ts, adjointintegral); 1532 PetscFunctionReturn(PETSC_SUCCESS); 1533 } 1534 1535 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1536 1537 /*@ 1538 TSForwardSetUp - Sets up the internal data structures for the later use 1539 of forward sensitivity analysis 1540 1541 Collective 1542 1543 Input Parameter: 1544 . ts - the `TS` context obtained from `TSCreate()` 1545 1546 Level: advanced 1547 1548 .seealso: [](chapter_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()` 1549 @*/ 1550 PetscErrorCode TSForwardSetUp(TS ts) 1551 { 1552 PetscFunctionBegin; 1553 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1554 if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1555 PetscTryTypeMethod(ts, forwardsetup); 1556 PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col)); 1557 ts->forwardsetupcalled = PETSC_TRUE; 1558 PetscFunctionReturn(PETSC_SUCCESS); 1559 } 1560 1561 /*@ 1562 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1563 1564 Collective 1565 1566 Input Parameter: 1567 . 
ts - the `TS` context obtained from `TSCreate()` 1568 1569 Level: advanced 1570 1571 .seealso: [](chapter_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()` 1572 @*/ 1573 PetscErrorCode TSForwardReset(TS ts) 1574 { 1575 TS quadts = ts->quadraturets; 1576 1577 PetscFunctionBegin; 1578 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1579 PetscTryTypeMethod(ts, forwardreset); 1580 PetscCall(MatDestroy(&ts->mat_sensip)); 1581 if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip)); 1582 PetscCall(VecDestroy(&ts->vec_sensip_col)); 1583 ts->forward_solve = PETSC_FALSE; 1584 ts->forwardsetupcalled = PETSC_FALSE; 1585 PetscFunctionReturn(PETSC_SUCCESS); 1586 } 1587 1588 /*@ 1589 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1590 1591 Input Parameters: 1592 + ts - the `TS` context obtained from `TSCreate()` 1593 . numfwdint - number of integrals 1594 - vp - the vectors containing the gradients for each integral w.r.t. parameters 1595 1596 Level: deprecated 1597 1598 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1599 @*/ 1600 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp) 1601 { 1602 PetscFunctionBegin; 1603 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1604 PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1605 if (!ts->numcost) ts->numcost = numfwdint; 1606 1607 ts->vecs_integral_sensip = vp; 1608 PetscFunctionReturn(PETSC_SUCCESS); 1609 } 1610 1611 /*@ 1612 TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term. 1613 1614 Input Parameter: 1615 . ts - the `TS` context obtained from `TSCreate()` 1616 1617 Output Parameter: 1618 . 
vp - the vectors containing the gradients for each integral w.r.t. parameters 1619 1620 Level: deprecated 1621 1622 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1623 @*/ 1624 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp) 1625 { 1626 PetscFunctionBegin; 1627 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1628 PetscValidPointer(vp, 3); 1629 if (numfwdint) *numfwdint = ts->numcost; 1630 if (vp) *vp = ts->vecs_integral_sensip; 1631 PetscFunctionReturn(PETSC_SUCCESS); 1632 } 1633 1634 /*@ 1635 TSForwardStep - Compute the forward sensitivity for one time step. 1636 1637 Collective 1638 1639 Input Parameter: 1640 . ts - time stepping context 1641 1642 Level: advanced 1643 1644 Notes: 1645 This function cannot be called until `TSStep()` has been completed. 1646 1647 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1648 @*/ 1649 PetscErrorCode TSForwardStep(TS ts) 1650 { 1651 PetscFunctionBegin; 1652 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1653 PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0)); 1654 PetscUseTypeMethod(ts, forwardstep); 1655 PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0)); 1656 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]); 1657 PetscFunctionReturn(PETSC_SUCCESS); 1658 } 1659 1660 /*@ 1661 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1662 1663 Logically Collective 1664 1665 Input Parameters: 1666 + ts - the `TS` context obtained from `TSCreate()` 1667 . 
nump - number of parameters 1668 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1669 1670 Level: beginner 1671 1672 Notes: 1673 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1674 This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically. 1675 You must call this function before `TSSolve()`. 1676 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 1677 1678 .seealso: [](chapter_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1679 @*/ 1680 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat) 1681 { 1682 PetscFunctionBegin; 1683 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1684 PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3); 1685 ts->forward_solve = PETSC_TRUE; 1686 if (nump == PETSC_DEFAULT) { 1687 PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters)); 1688 } else ts->num_parameters = nump; 1689 PetscCall(PetscObjectReference((PetscObject)Smat)); 1690 PetscCall(MatDestroy(&ts->mat_sensip)); 1691 ts->mat_sensip = Smat; 1692 PetscFunctionReturn(PETSC_SUCCESS); 1693 } 1694 1695 /*@ 1696 TSForwardGetSensitivities - Returns the trajectory sensitivities 1697 1698 Not Collective, but Smat returned is parallel if ts is parallel 1699 1700 Output Parameters: 1701 + ts - the `TS` context obtained from `TSCreate()` 1702 . 
nump - number of parameters 1703 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1704 1705 Level: intermediate 1706 1707 .seealso: [](chapter_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1708 @*/ 1709 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat) 1710 { 1711 PetscFunctionBegin; 1712 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1713 if (nump) *nump = ts->num_parameters; 1714 if (Smat) *Smat = ts->mat_sensip; 1715 PetscFunctionReturn(PETSC_SUCCESS); 1716 } 1717 1718 /*@ 1719 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1720 1721 Collective 1722 1723 Input Parameter: 1724 . ts - time stepping context 1725 1726 Level: advanced 1727 1728 Note: 1729 This function cannot be called until `TSStep()` has been completed. 1730 1731 .seealso: [](chapter_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()` 1732 @*/ 1733 PetscErrorCode TSForwardCostIntegral(TS ts) 1734 { 1735 PetscFunctionBegin; 1736 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1737 PetscUseTypeMethod(ts, forwardintegral); 1738 PetscFunctionReturn(PETSC_SUCCESS); 1739 } 1740 1741 /*@ 1742 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1743 1744 Collective 1745 1746 Input Parameters: 1747 + ts - the `TS` context obtained from `TSCreate()` 1748 - didp - parametric sensitivities of the initial condition 1749 1750 Level: intermediate 1751 1752 Notes: 1753 `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way. 1754 This function is used to set initial values for tangent linear variables. 
1755 1756 .seealso: [](chapter_ts), `TS`, `TSForwardSetSensitivities()` 1757 @*/ 1758 PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp) 1759 { 1760 PetscFunctionBegin; 1761 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1762 PetscValidHeaderSpecific(didp, MAT_CLASSID, 2); 1763 if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp)); 1764 PetscFunctionReturn(PETSC_SUCCESS); 1765 } 1766 1767 /*@ 1768 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1769 1770 Input Parameter: 1771 . ts - the `TS` context obtained from `TSCreate()` 1772 1773 Output Parameters: 1774 + ns - number of stages 1775 - S - tangent linear sensitivities at the intermediate stages 1776 1777 Level: advanced 1778 1779 .seealso: `TS` 1780 @*/ 1781 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S) 1782 { 1783 PetscFunctionBegin; 1784 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1785 1786 if (!ts->ops->getstages) *S = NULL; 1787 else PetscUseTypeMethod(ts, forwardgetstages, ns, S); 1788 PetscFunctionReturn(PETSC_SUCCESS); 1789 } 1790 1791 /*@ 1792 TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time 1793 1794 Input Parameters: 1795 + ts - the `TS` context obtained from `TSCreate()` 1796 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1797 1798 Output Parameters: 1799 . 
quadts - the child `TS` context 1800 1801 Level: intermediate 1802 1803 .seealso: [](chapter_ts), `TSGetQuadratureTS()` 1804 @*/ 1805 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts) 1806 { 1807 char prefix[128]; 1808 1809 PetscFunctionBegin; 1810 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1811 PetscValidPointer(quadts, 3); 1812 PetscCall(TSDestroy(&ts->quadraturets)); 1813 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets)); 1814 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1)); 1815 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : "")); 1816 PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix)); 1817 *quadts = ts->quadraturets; 1818 1819 if (ts->numcost) { 1820 PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol)); 1821 } else { 1822 PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol)); 1823 } 1824 ts->costintegralfwd = fwd; 1825 PetscFunctionReturn(PETSC_SUCCESS); 1826 } 1827 1828 /*@ 1829 TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time 1830 1831 Input Parameter: 1832 . 
ts - the `TS` context obtained from `TSCreate()` 1833 1834 Output Parameters: 1835 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1836 - quadts - the child `TS` context 1837 1838 Level: intermediate 1839 1840 .seealso: [](chapter_ts), `TSCreateQuadratureTS()` 1841 @*/ 1842 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts) 1843 { 1844 PetscFunctionBegin; 1845 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1846 if (fwd) *fwd = ts->costintegralfwd; 1847 if (quadts) *quadts = ts->quadraturets; 1848 PetscFunctionReturn(PETSC_SUCCESS); 1849 } 1850 1851 /*@ 1852 TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS` 1853 1854 Collective 1855 1856 Input Parameters: 1857 + ts - the `TS` context obtained from `TSCreate()` 1858 - x - state vector 1859 1860 Output Parameters: 1861 + J - Jacobian matrix 1862 - Jpre - preconditioning matrix for J (may be same as J) 1863 1864 Level: developer 1865 1866 Note: 1867 Uses finite differencing when `TS` Jacobian is not available. 1868 1869 .seealso: `SNES`, `TS`, `SNESSetJacobian()`, TSSetRHSJacobian()`, `TSSetIJacobian()` 1870 @*/ 1871 PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre) 1872 { 1873 SNES snes = ts->snes; 1874 PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL; 1875 1876 PetscFunctionBegin; 1877 /* 1878 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 1879 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 1880 explicit methods. Instead, we check the Jacobian compute function directly to determine if FD 1881 coloring is used. 
1882 */ 1883 PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL)); 1884 if (jac == SNESComputeJacobianDefaultColor) { 1885 Vec f; 1886 PetscCall(SNESSetSolution(snes, x)); 1887 PetscCall(SNESGetFunction(snes, &f, NULL, NULL)); 1888 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 1889 PetscCall(SNESComputeFunction(snes, x, f)); 1890 } 1891 PetscCall(SNESComputeJacobian(snes, x, J, Jpre)); 1892 PetscFunctionReturn(PETSC_SUCCESS); 1893 } 1894