1 #include <petsc/private/tsimpl.h> /*I <petscts.h> I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective 14 15 Input Parameters: 16 + ts - `TS` context obtained from `TSCreate()` 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Level: intermediate 22 23 Note: 24 `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p` 25 26 .seealso: [](ch_ts), `TS`, `TSRHSJacobianPFn`, `TSGetRHSJacobianP()` 27 @*/ 28 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianPFn *func, void *ctx) 29 { 30 PetscFunctionBegin; 31 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 32 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 33 34 ts->rhsjacobianp = func; 35 ts->rhsjacobianpctx = ctx; 36 if (Amat) { 37 PetscCall(PetscObjectReference((PetscObject)Amat)); 38 PetscCall(MatDestroy(&ts->Jacprhs)); 39 ts->Jacprhs = Amat; 40 } 41 PetscFunctionReturn(PETSC_SUCCESS); 42 } 43 44 /*@C 45 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 46 47 Logically Collective 48 49 Input Parameter: 50 . ts - `TS` context obtained from `TSCreate()` 51 52 Output Parameters: 53 + Amat - JacobianP matrix 54 . 
func - function 55 - ctx - [optional] user-defined function context 56 57 Level: intermediate 58 59 Note: 60 `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p` 61 62 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianPFn` 63 @*/ 64 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianPFn **func, void **ctx) 65 { 66 PetscFunctionBegin; 67 if (func) *func = ts->rhsjacobianp; 68 if (ctx) *ctx = ts->rhsjacobianpctx; 69 if (Amat) *Amat = ts->Jacprhs; 70 PetscFunctionReturn(PETSC_SUCCESS); 71 } 72 73 /*@C 74 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 75 76 Collective 77 78 Input Parameters: 79 + ts - The `TS` context obtained from `TSCreate()` 80 . t - the time 81 - U - the solution at which to compute the Jacobian 82 83 Output Parameter: 84 . Amat - the computed Jacobian 85 86 Level: developer 87 88 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 89 @*/ 90 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat) 91 { 92 PetscFunctionBegin; 93 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 94 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 95 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 96 97 if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 98 else { 99 PetscBool assembled; 100 PetscCall(MatZeroEntries(Amat)); 101 PetscCall(MatAssembled(Amat, &assembled)); 102 if (!assembled) { 103 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 104 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 105 } 106 } 107 PetscFunctionReturn(PETSC_SUCCESS); 108 } 109 110 /*@C 111 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 
112 113 Logically Collective 114 115 Input Parameters: 116 + ts - `TS` context obtained from `TSCreate()` 117 . Amat - JacobianP matrix 118 . func - function 119 - ctx - [optional] user-defined function context 120 121 Calling sequence of `func`: 122 + ts - the `TS` context 123 . t - current timestep 124 . U - input vector (current ODE solution) 125 . Udot - time derivative of state vector 126 . shift - shift to apply, see note below 127 . A - output matrix 128 - ctx - [optional] user-defined function context 129 130 Level: intermediate 131 132 Note: 133 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 134 135 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 136 @*/ 137 PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx) 138 { 139 PetscFunctionBegin; 140 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 141 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 142 143 ts->ijacobianp = func; 144 ts->ijacobianpctx = ctx; 145 if (Amat) { 146 PetscCall(PetscObjectReference((PetscObject)Amat)); 147 PetscCall(MatDestroy(&ts->Jacp)); 148 ts->Jacp = Amat; 149 } 150 PetscFunctionReturn(PETSC_SUCCESS); 151 } 152 153 /*@C 154 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 155 156 Collective 157 158 Input Parameters: 159 + ts - the `TS` context 160 . t - current timestep 161 . U - state vector 162 . Udot - time derivative of state vector 163 . shift - shift to apply, see note below 164 - imex - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate 165 166 Output Parameter: 167 . 
Amat - Jacobian matrix 168 169 Level: developer 170 171 .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()` 172 @*/ 173 PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex) 174 { 175 PetscFunctionBegin; 176 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 177 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 178 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 179 PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4); 180 181 PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0)); 182 if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx)); 183 if (imex) { 184 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 185 PetscBool assembled; 186 PetscCall(MatZeroEntries(Amat)); 187 PetscCall(MatAssembled(Amat, &assembled)); 188 if (!assembled) { 189 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 190 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 191 } 192 } 193 } else { 194 if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs)); 195 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 196 PetscCall(MatScale(Amat, -1)); 197 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 198 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 199 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 200 PetscCall(MatZeroEntries(Amat)); 201 } 202 PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy)); 203 } 204 } 205 PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0)); 206 PetscFunctionReturn(PETSC_SUCCESS); 207 } 208 209 /*@C 210 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 211 212 Logically Collective 213 214 Input Parameters: 215 + ts - the `TS` context obtained from `TSCreate()` 216 . numcost - number of gradients to be computed, this is the number of cost functions 217 . 
  costintegral - vector that stores the integral values
. rf           - routine for evaluating the integrand function
. drduf        - function that computes the gradients of the r with respect to u
. drdpf        - function that computes the gradients of the r with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`)
. fwd          - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
- ctx          - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`)

  Calling sequence of `rf`:
+ ts  - the integrator
. t   - the time
. U   - the solution
. F   - the computed value of the function
- ctx - the user context

  Calling sequence of `drduf`:
+ ts   - the integrator
. t    - the time
. U    - the solution
. dRdU - the computed gradients of the r with respect to u
- ctx  - the user context

  Calling sequence of `drdpf`:
+ ts   - the integrator
. t    - the time
. U    - the solution
. dRdP - the computed gradients of the r with respect to p
- ctx  - the user context

  Level: deprecated

  Notes:
  For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

  Use `TSCreateQuadratureTS()` and `TSForwardSetSensitivities()` instead

.seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`,
          `TSCreateQuadratureTS()`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS ts, PetscReal t, Vec U, Vec F, void *ctx), PetscErrorCode (*drduf)(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx), PetscErrorCode (*drdpf)(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx), PetscBool fwd, void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3);
  /* numcost must agree with any count already fixed elsewhere (e.g. TSSetCostGradients()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost = numcost;

  if (costintegral) {
    /* take a reference to the user-provided storage, releasing any previous vector */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral, 0.0));
    }
  }
  /* work vector for integrand evaluations, shaped like the integral vector */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand, 0.0));
  }
  ts->costintegralfwd  = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand    = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction     = drduf;
  ts->drdpfunction     = drdpf;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
TSGetCostIntegral - Returns the values of the integral term in the cost functions. 289 It is valid to call the routine after a backward run. 290 291 Not Collective 292 293 Input Parameter: 294 . ts - the `TS` context obtained from `TSCreate()` 295 296 Output Parameter: 297 . v - the vector containing the integrals for each cost function 298 299 Level: intermediate 300 301 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, ``TSSetCostIntegrand()` 302 @*/ 303 PetscErrorCode TSGetCostIntegral(TS ts, Vec *v) 304 { 305 TS quadts; 306 307 PetscFunctionBegin; 308 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 309 PetscAssertPointer(v, 2); 310 PetscCall(TSGetQuadratureTS(ts, NULL, &quadts)); 311 *v = quadts->vec_sol; 312 PetscFunctionReturn(PETSC_SUCCESS); 313 } 314 315 /*@C 316 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 317 318 Input Parameters: 319 + ts - the `TS` context 320 . t - current time 321 - U - state vector, i.e. current solution 322 323 Output Parameter: 324 . Q - vector of size numcost to hold the outputs 325 326 Level: deprecated 327 328 Note: 329 Most users should not need to explicitly call this routine, as it 330 is used internally within the sensitivity analysis context. 
331 332 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()` 333 @*/ 334 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q) 335 { 336 PetscFunctionBegin; 337 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 338 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 339 PetscValidHeaderSpecific(Q, VEC_CLASSID, 4); 340 341 PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0)); 342 if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx)); 343 else PetscCall(VecZeroEntries(Q)); 344 PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0)); 345 PetscFunctionReturn(PETSC_SUCCESS); 346 } 347 348 // PetscClangLinter pragma disable: -fdoc-* 349 /*@C 350 TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 351 352 Level: deprecated 353 354 @*/ 355 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 356 { 357 PetscFunctionBegin; 358 if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS); 359 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 360 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 361 362 PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 363 PetscFunctionReturn(PETSC_SUCCESS); 364 } 365 366 // PetscClangLinter pragma disable: -fdoc-* 367 /*@C 368 TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 369 370 Level: deprecated 371 372 @*/ 373 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 374 { 375 PetscFunctionBegin; 376 if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS); 377 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 378 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 379 380 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 381 PetscFunctionReturn(PETSC_SUCCESS); 382 } 383 384 // PetscClangLinter pragma disable: 
// PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation
// PetscClangLinter pragma disable: -fdoc-section-header-unknown
/*@C
  TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

  Logically Collective

  Input Parameters:
+ ts   - `TS` context obtained from `TSCreate()`
. ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
. hessianproductfunc1 - vector-Hessian-vector product function for F_UU
. ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
. hessianproductfunc2 - vector-Hessian-vector product function for F_UP
. ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
. hessianproductfunc3 - vector-Hessian-vector product function for F_PU
. ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
- hessianproductfunc4 - vector-Hessian-vector product function for F_PP

  Calling sequence of `ihessianproductfunc1`:
+ ts  - the `TS` context
. t   - current timestep
. U   - input vector (current ODE solution)
. Vl  - an array of input vectors to be left-multiplied with the Hessian
. Vr  - input vector to be right-multiplied with the Hessian
. VHV - an array of output vectors for vector-Hessian-vector product
- ctx - [optional] user-defined function context

  Level: intermediate

  Notes:
  All other functions have the same calling sequence as `ihessianproductfunc1`, so their
  descriptions are omitted for brevity.

  The first Hessian function and the working array are required.

  As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
  $ Vl_n^T*F_UP*Vr
  where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
  Each entry of F_UP corresponds to the derivative
  $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
  The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
  $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
  If the cost function is a scalar, there will be only one vector in Vl and VHV.

.seealso: [](ch_ts), `TS`
@*/
PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscAssertPointer(ihp1, 2); /* the first working array is required */

  ts->ihessianproductctx = ctx;
  /* only overwrite stored working arrays when new ones are supplied */
  if (ihp1) ts->vecs_fuu = ihp1;
  if (ihp2) ts->vecs_fup = ihp2;
  if (ihp3) ts->vecs_fpu = ihp3;
  if (ihp4) ts->vecs_fpp = ihp4;
  ts->ihessianproduct_fuu = ihessianproductfunc1;
  ts->ihessianproduct_fup = ihessianproductfunc2;
  ts->ihessianproduct_fpu = ihessianproductfunc3;
  ts->ihessianproduct_fpp = ihessianproductfunc4;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
.
U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](ch_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to compute into */
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_guu) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV));
    /* RHS contribution enters with a negative sign (cf. the MatScale(-1) in TSComputeIJacobianP) */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
. U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](ch_ts), `TSSetIHessianProduct()`
@*/
PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to compute into */
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx));

  /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */
  if (ts->rhshessianproduct_gup) {
    PetscInt nadj;
    PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV));
    /* RHS contribution enters with a negative sign (cf. the MatScale(-1) in TSComputeIJacobianP) */
    for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
. U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
549 550 .seealso: [](ch_ts), `TSSetIHessianProduct()` 551 @*/ 552 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 553 { 554 PetscFunctionBegin; 555 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 556 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 557 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 558 559 if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 560 561 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 562 if (ts->rhshessianproduct_gpu) { 563 PetscInt nadj; 564 PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV)); 565 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 566 } 567 PetscFunctionReturn(PETSC_SUCCESS); 568 } 569 570 /*@C 571 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 572 573 Collective 574 575 Input Parameters: 576 + ts - The `TS` context obtained from `TSCreate()` 577 . t - the time 578 . U - the solution at which to compute the Hessian product 579 . Vl - the array of input vectors to be multiplied with the Hessian from the left 580 - Vr - the input vector to be multiplied with the Hessian from the right 581 582 Output Parameter: 583 . VHV - the array of output vectors that store the Hessian product 584 585 Level: developer 586 587 Note: 588 `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation, 589 so most users would not generally call this routine themselves. 
590 591 .seealso: [](ch_ts), `TSSetIHessianProduct()` 592 @*/ 593 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 594 { 595 PetscFunctionBegin; 596 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 597 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 598 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 599 600 if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 601 602 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 603 if (ts->rhshessianproduct_gpp) { 604 PetscInt nadj; 605 PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV)); 606 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 607 } 608 PetscFunctionReturn(PETSC_SUCCESS); 609 } 610 611 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation 612 // PetscClangLinter pragma disable: -fdoc-section-header-unknown 613 /*@C 614 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector 615 product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state 616 variable. 617 618 Logically Collective 619 620 Input Parameters: 621 + ts - `TS` context obtained from `TSCreate()` 622 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 623 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 624 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 625 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 626 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 627 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 628 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP
. hessianproductfunc4 - vector-Hessian-vector product function for G_PP
- ctx - [optional] user-defined function context

  Calling sequence of `rhshessianproductfunc1`:
+ ts  - the `TS` context
. t   - current timestep
. U   - input vector (current ODE solution)
. Vl  - an array of input vectors to be left-multiplied with the Hessian
. Vr  - input vector to be right-multiplied with the Hessian
. VHV - an array of output vectors for vector-Hessian-vector product
- ctx - [optional] user-defined function context

  Level: intermediate

  Notes:
  All other functions have the same calling sequence as `rhshessianproductfunc1`, so their
  descriptions are omitted for brevity.

  The first Hessian function and the working array are required.

  As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
  $ Vl_n^T*G_UP*Vr
  where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M.
  Each entry of G_UP corresponds to the derivative
  $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}.
  The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being
  $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]}
  If the cost function is a scalar, there will be only one vector in Vl and VHV.

.seealso: `TS`, `TSAdjoint`
@*/
PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscAssertPointer(rhshp1, 2); /* the first working array is required */

  ts->rhshessianproductctx = ctx;
  /* only overwrite stored working arrays when new ones are supplied */
  if (rhshp1) ts->vecs_guu = rhshp1;
  if (rhshp2) ts->vecs_gup = rhshp2;
  if (rhshp3) ts->vecs_gpu = rhshp3;
  if (rhshp4) ts->vecs_gpp = rhshp4;
  ts->rhshessianproduct_guu = rhshessianproductfunc1;
  ts->rhshessianproduct_gup = rhshessianproductfunc2;
  ts->rhshessianproduct_gpu = rhshessianproductfunc3;
  ts->rhshessianproduct_gpp = rhshessianproductfunc4;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
. U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to compute into */
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* NOTE(review): unlike the IHessian variants this does not guard the callback pointer;
     callers (e.g. TSComputeIHessianProductFunctionUU()) check ts->rhshessianproduct_guu first */
  PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
. U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.

.seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()`
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV)
{
  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); /* nothing to compute into */
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(U, VEC_CLASSID, 3);

  /* callers check ts->rhshessianproduct_gup before calling (see TSComputeIHessianProductFunctionUP()) */
  PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu.

  Collective

  Input Parameters:
+ ts - The `TS` context obtained from `TSCreate()`
. t  - the time
. U  - the solution at which to compute the Hessian product
. Vl - the array of input vectors to be multiplied with the Hessian from the left
- Vr - the input vector to be multiplied with the Hessian from the right

  Output Parameter:
. VHV - the array of output vectors that store the Hessian product

  Level: developer

  Note:
  `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation,
  so most users would not generally call this routine themselves.
766 767 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 768 @*/ 769 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 770 { 771 PetscFunctionBegin; 772 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 773 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 774 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 775 776 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 777 PetscFunctionReturn(PETSC_SUCCESS); 778 } 779 780 /*@C 781 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 782 783 Collective 784 785 Input Parameters: 786 + ts - The `TS` context obtained from `TSCreate()` 787 . t - the time 788 . U - the solution at which to compute the Hessian product 789 . Vl - the array of input vectors to be multiplied with the Hessian from the left 790 - Vr - the input vector to be multiplied with the Hessian from the right 791 792 Output Parameter: 793 . VHV - the array of output vectors that store the Hessian product 794 795 Level: developer 796 797 Note: 798 `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation, 799 so most users would not generally call this routine themselves. 
800 801 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 802 @*/ 803 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 804 { 805 PetscFunctionBegin; 806 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 807 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 808 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 809 810 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 811 PetscFunctionReturn(PETSC_SUCCESS); 812 } 813 814 /* --------------------------- Adjoint sensitivity ---------------------------*/ 815 816 /*@ 817 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 818 for use by the `TS` adjoint routines. 819 820 Logically Collective 821 822 Input Parameters: 823 + ts - the `TS` context obtained from `TSCreate()` 824 . numcost - number of gradients to be computed, this is the number of cost functions 825 . 
lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 826 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 827 828 Level: beginner 829 830 Notes: 831 the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime mu_i = df/dp|finaltime 832 833 After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities 834 835 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()` 836 @*/ 837 PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu) 838 { 839 PetscFunctionBegin; 840 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 841 PetscAssertPointer(lambda, 3); 842 ts->vecs_sensi = lambda; 843 ts->vecs_sensip = mu; 844 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 845 ts->numcost = numcost; 846 PetscFunctionReturn(PETSC_SUCCESS); 847 } 848 849 /*@ 850 TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()` 851 852 Not Collective, but the vectors returned are parallel if `TS` is parallel 853 854 Input Parameter: 855 . ts - the `TS` context obtained from `TSCreate()` 856 857 Output Parameters: 858 + numcost - size of returned arrays 859 . 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 860 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 861 862 Level: intermediate 863 864 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()` 865 @*/ 866 PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu) 867 { 868 PetscFunctionBegin; 869 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 870 if (numcost) *numcost = ts->numcost; 871 if (lambda) *lambda = ts->vecs_sensi; 872 if (mu) *mu = ts->vecs_sensip; 873 PetscFunctionReturn(PETSC_SUCCESS); 874 } 875 876 /*@ 877 TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters 878 for use by the `TS` adjoint routines. 879 880 Logically Collective 881 882 Input Parameters: 883 + ts - the `TS` context obtained from `TSCreate()` 884 . numcost - number of cost functions 885 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 886 . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 887 - dir - the direction vector that are multiplied with the Hessian of the cost functions 888 889 Level: beginner 890 891 Notes: 892 Hessian of the cost function is completely different from Hessian of the ODE/DAE system 893 894 For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`. 895 896 After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver. 
897 898 Passing `NULL` for `lambda2` disables the second-order calculation. 899 900 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()` 901 @*/ 902 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir) 903 { 904 PetscFunctionBegin; 905 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 906 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 907 ts->numcost = numcost; 908 ts->vecs_sensi2 = lambda2; 909 ts->vecs_sensi2p = mu2; 910 ts->vec_dir = dir; 911 PetscFunctionReturn(PETSC_SUCCESS); 912 } 913 914 /*@ 915 TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()` 916 917 Not Collective, but vectors returned are parallel if `TS` is parallel 918 919 Input Parameter: 920 . ts - the `TS` context obtained from `TSCreate()` 921 922 Output Parameters: 923 + numcost - number of cost functions 924 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 925 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 926 - dir - the direction vector that are multiplied with the Hessian of the cost functions 927 928 Level: intermediate 929 930 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()` 931 @*/ 932 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir) 933 { 934 PetscFunctionBegin; 935 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 936 if (numcost) *numcost = ts->numcost; 937 if (lambda2) *lambda2 = ts->vecs_sensi2; 938 if (mu2) *mu2 = ts->vecs_sensi2p; 939 if (dir) *dir = ts->vec_dir; 940 PetscFunctionReturn(PETSC_SUCCESS); 941 } 942 943 /*@ 944 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 945 946 Logically Collective 947 948 Input Parameters: 949 + ts - the `TS` context obtained from `TSCreate()` 950 - didp - the derivative of initial values w.r.t. parameters 951 952 Level: intermediate 953 954 Notes: 955 When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically. 956 `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver. 957 958 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 959 @*/ 960 PetscErrorCode TSAdjointSetForward(TS ts, Mat didp) 961 { 962 Mat A; 963 Vec sp; 964 PetscScalar *xarr; 965 PetscInt lsize; 966 967 PetscFunctionBegin; 968 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 969 PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first"); 970 PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 971 /* create a single-column dense matrix */ 972 PetscCall(VecGetLocalSize(ts->vec_sol, &lsize)); 973 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A)); 974 975 PetscCall(VecDuplicate(ts->vec_sol, &sp)); 976 PetscCall(MatDenseGetColumn(A, 0, &xarr)); 977 PetscCall(VecPlaceArray(sp, xarr)); 978 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 979 if (didp) { 980 PetscCall(MatMult(didp, ts->vec_dir, sp)); 981 PetscCall(VecScale(sp, 2.)); 982 } else { 983 PetscCall(VecZeroEntries(sp)); 984 } 985 } else { /* tangent linear variable initialized as dir */ 986 PetscCall(VecCopy(ts->vec_dir, sp)); 987 } 988 PetscCall(VecResetArray(sp)); 989 PetscCall(MatDenseRestoreColumn(A, &xarr)); 990 PetscCall(VecDestroy(&sp)); 991 992 PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */ 993 994 PetscCall(MatDestroy(&A)); 995 PetscFunctionReturn(PETSC_SUCCESS); 996 } 997 998 /*@ 999 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 1000 1001 Logically Collective 1002 1003 Input Parameter: 1004 . ts - the `TS` context obtained from `TSCreate()` 1005 1006 Level: intermediate 1007 1008 .seealso: [](ch_ts), `TSAdjointSetForward()` 1009 @*/ 1010 PetscErrorCode TSAdjointResetForward(TS ts) 1011 { 1012 PetscFunctionBegin; 1013 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 1014 PetscCall(TSForwardReset(ts)); 1015 PetscFunctionReturn(PETSC_SUCCESS); 1016 } 1017 1018 /*@ 1019 TSAdjointSetUp - Sets up the internal data structures for the later use 1020 of an adjoint solver 1021 1022 Collective 1023 1024 Input Parameter: 1025 . 
ts - the `TS` context obtained from `TSCreate()` 1026 1027 Level: advanced 1028 1029 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 1030 @*/ 1031 PetscErrorCode TSAdjointSetUp(TS ts) 1032 { 1033 TSTrajectory tj; 1034 PetscBool match; 1035 1036 PetscFunctionBegin; 1037 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1038 if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1039 PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first"); 1040 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 1041 PetscCall(TSGetTrajectory(ts, &tj)); 1042 PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match)); 1043 if (match) { 1044 PetscBool solution_only; 1045 PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only)); 1046 PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 1047 } 1048 PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */ 1049 1050 if (ts->quadraturets) { /* if there is integral in the cost function */ 1051 PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col)); 1052 if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col)); 1053 } 1054 1055 PetscTryTypeMethod(ts, adjointsetup); 1056 ts->adjointsetupcalled = PETSC_TRUE; 1057 PetscFunctionReturn(PETSC_SUCCESS); 1058 } 1059 1060 /*@ 1061 TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s. 1062 1063 Collective 1064 1065 Input Parameter: 1066 . 
ts - the `TS` context obtained from `TSCreate()` 1067 1068 Level: beginner 1069 1070 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 1071 @*/ 1072 PetscErrorCode TSAdjointReset(TS ts) 1073 { 1074 PetscFunctionBegin; 1075 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1076 PetscTryTypeMethod(ts, adjointreset); 1077 if (ts->quadraturets) { /* if there is integral in the cost function */ 1078 PetscCall(VecDestroy(&ts->vec_drdu_col)); 1079 if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col)); 1080 } 1081 ts->vecs_sensi = NULL; 1082 ts->vecs_sensip = NULL; 1083 ts->vecs_sensi2 = NULL; 1084 ts->vecs_sensi2p = NULL; 1085 ts->vec_dir = NULL; 1086 ts->adjointsetupcalled = PETSC_FALSE; 1087 PetscFunctionReturn(PETSC_SUCCESS); 1088 } 1089 1090 /*@ 1091 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1092 1093 Logically Collective 1094 1095 Input Parameters: 1096 + ts - the `TS` context obtained from `TSCreate()` 1097 - steps - number of steps to use 1098 1099 Level: intermediate 1100 1101 Notes: 1102 Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. 
One can call this 1103 so as to integrate back to less than the original timestep 1104 1105 .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()` 1106 @*/ 1107 PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps) 1108 { 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1111 PetscValidLogicalCollectiveInt(ts, steps, 2); 1112 PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps"); 1113 PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps"); 1114 ts->adjoint_max_steps = steps; 1115 PetscFunctionReturn(PETSC_SUCCESS); 1116 } 1117 1118 // PetscClangLinter pragma disable: -fdoc-* 1119 /*@C 1120 TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()` 1121 1122 Level: deprecated 1123 @*/ 1124 PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx) 1125 { 1126 PetscFunctionBegin; 1127 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1128 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1129 1130 ts->rhsjacobianp = func; 1131 ts->rhsjacobianpctx = ctx; 1132 if (Amat) { 1133 PetscCall(PetscObjectReference((PetscObject)Amat)); 1134 PetscCall(MatDestroy(&ts->Jacp)); 1135 ts->Jacp = Amat; 1136 } 1137 PetscFunctionReturn(PETSC_SUCCESS); 1138 } 1139 1140 // PetscClangLinter pragma disable: -fdoc-* 1141 /*@C 1142 TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()` 1143 1144 Level: deprecated 1145 @*/ 1146 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat) 1147 { 1148 PetscFunctionBegin; 1149 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1150 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1151 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 4); 1152 1153 PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, 
ts->rhsjacobianpctx)); 1154 PetscFunctionReturn(PETSC_SUCCESS); 1155 } 1156 1157 // PetscClangLinter pragma disable: -fdoc-* 1158 /*@ 1159 TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 1160 1161 Level: deprecated 1162 @*/ 1163 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 1164 { 1165 PetscFunctionBegin; 1166 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1167 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1168 1169 PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 1170 PetscFunctionReturn(PETSC_SUCCESS); 1171 } 1172 1173 // PetscClangLinter pragma disable: -fdoc-* 1174 /*@ 1175 TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 1176 1177 Level: deprecated 1178 @*/ 1179 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 1180 { 1181 PetscFunctionBegin; 1182 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1183 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1184 1185 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 1186 PetscFunctionReturn(PETSC_SUCCESS); 1187 } 1188 1189 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation 1190 /*@C 1191 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1192 1193 Level: intermediate 1194 1195 .seealso: [](ch_ts), `TSAdjointMonitorSet()` 1196 @*/ 1197 static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1198 { 1199 PetscViewer viewer = vf->viewer; 1200 1201 PetscFunctionBegin; 1202 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1203 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1204 PetscCall(VecView(lambda[0], viewer)); 1205 PetscCall(PetscViewerPopFormat(viewer)); 1206 
PetscFunctionReturn(PETSC_SUCCESS); 1207 } 1208 1209 /*@C 1210 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1211 1212 Collective 1213 1214 Input Parameters: 1215 + ts - `TS` object you wish to monitor 1216 . name - the monitor type one is seeking 1217 . help - message indicating what monitoring is done 1218 . manual - manual page for the monitor 1219 . monitor - the monitor function 1220 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects 1221 1222 Level: developer 1223 1224 .seealso: [](ch_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`, 1225 `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()` 1226 `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, 1227 `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`, 1228 `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`, 1229 `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`, 1230 `PetscOptionsFList()`, `PetscOptionsEList()` 1231 @*/ 1232 PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *)) 1233 { 1234 PetscViewer viewer; 1235 PetscViewerFormat format; 1236 PetscBool flg; 1237 1238 PetscFunctionBegin; 1239 PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg)); 1240 if (flg) { 1241 PetscViewerAndFormat *vf; 1242 PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf)); 1243 PetscCall(PetscOptionsRestoreViewer(&viewer)); 1244 if 
(monitorsetup) PetscCall((*monitorsetup)(ts, vf)); 1245 PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy)); 1246 } 1247 PetscFunctionReturn(PETSC_SUCCESS); 1248 } 1249 1250 /*@C 1251 TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every 1252 timestep to display the iteration's progress. 1253 1254 Logically Collective 1255 1256 Input Parameters: 1257 + ts - the `TS` context obtained from `TSCreate()` 1258 . adjointmonitor - monitoring routine 1259 . adjointmctx - [optional] user-defined context for private data for the monitor routine 1260 (use `NULL` if no context is desired) 1261 - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`) 1262 1263 Calling sequence of `adjointmonitor`: 1264 + ts - the `TS` context 1265 . steps - iteration number (after the final time step the monitor routine is called with 1266 a step of -1, this is at the final time which may have been interpolated to) 1267 . time - current time 1268 . u - current iterate 1269 . numcost - number of cost functionos 1270 . lambda - sensitivities to initial conditions 1271 . mu - sensitivities to parameters 1272 - adjointmctx - [optional] adjoint monitoring context 1273 1274 Calling sequence of `adjointmdestroy`: 1275 . mctx - the monitor context to destroy 1276 1277 Level: intermediate 1278 1279 Note: 1280 This routine adds an additional monitor to the list of monitors that 1281 already has been loaded. 
1282 1283 Fortran Notes: 1284 Only a single monitor function can be set for each `TS` object 1285 1286 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()` 1287 @*/ 1288 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **mctx)) 1289 { 1290 PetscInt i; 1291 PetscBool identical; 1292 1293 PetscFunctionBegin; 1294 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1295 for (i = 0; i < ts->numbermonitors; i++) { 1296 PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical)); 1297 if (identical) PetscFunctionReturn(PETSC_SUCCESS); 1298 } 1299 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set"); 1300 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1301 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1302 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx; 1303 PetscFunctionReturn(PETSC_SUCCESS); 1304 } 1305 1306 /*@C 1307 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1308 1309 Logically Collective 1310 1311 Input Parameter: 1312 . ts - the `TS` context obtained from `TSCreate()` 1313 1314 Notes: 1315 There is no way to remove a single, specific monitor. 
1316 1317 Level: intermediate 1318 1319 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1320 @*/ 1321 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1322 { 1323 PetscInt i; 1324 1325 PetscFunctionBegin; 1326 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1327 for (i = 0; i < ts->numberadjointmonitors; i++) { 1328 if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); 1329 } 1330 ts->numberadjointmonitors = 0; 1331 PetscFunctionReturn(PETSC_SUCCESS); 1332 } 1333 1334 /*@C 1335 TSAdjointMonitorDefault - the default monitor of adjoint computations 1336 1337 Input Parameters: 1338 + ts - the `TS` context 1339 . step - iteration number (after the final time step the monitor routine is called with a 1340 step of -1, this is at the final time which may have been interpolated to) 1341 . time - current time 1342 . v - current iterate 1343 . numcost - number of cost functionos 1344 . lambda - sensitivities to initial conditions 1345 . mu - sensitivities to parameters 1346 - vf - the viewer and format 1347 1348 Level: intermediate 1349 1350 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1351 @*/ 1352 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1353 { 1354 PetscViewer viewer = vf->viewer; 1355 1356 PetscFunctionBegin; 1357 (void)v; 1358 (void)numcost; 1359 (void)lambda; 1360 (void)mu; 1361 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1362 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1363 PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel)); 1364 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? 
" (r)\n" : "\n")); 1365 PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel)); 1366 PetscCall(PetscViewerPopFormat(viewer)); 1367 PetscFunctionReturn(PETSC_SUCCESS); 1368 } 1369 1370 /*@C 1371 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling 1372 `VecView()` for the sensitivities to initial states at each timestep 1373 1374 Collective 1375 1376 Input Parameters: 1377 + ts - the `TS` context 1378 . step - current time-step 1379 . ptime - current time 1380 . u - current state 1381 . numcost - number of cost functions 1382 . lambda - sensitivities to initial conditions 1383 . mu - sensitivities to parameters 1384 - dummy - either a viewer or `NULL` 1385 1386 Level: intermediate 1387 1388 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1389 @*/ 1390 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy) 1391 { 1392 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1393 PetscDraw draw; 1394 PetscReal xl, yl, xr, yr, h; 1395 char time[32]; 1396 1397 PetscFunctionBegin; 1398 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS); 1399 1400 PetscCall(VecView(lambda[0], ictx->viewer)); 1401 PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw)); 1402 PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime)); 1403 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 1404 h = yl + .95 * (yr - yl); 1405 PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time)); 1406 PetscCall(PetscDrawFlush(draw)); 1407 PetscFunctionReturn(PETSC_SUCCESS); 1408 } 1409 1410 /*@C 1411 TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database. 
1412 1413 Collective 1414 1415 Input Parameters: 1416 + ts - the `TS` context 1417 - PetscOptionsObject - the options context 1418 1419 Options Database Keys: 1420 + -ts_adjoint_solve <yes,no> - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`) 1421 . -ts_adjoint_monitor - print information at each adjoint time step 1422 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1423 1424 Level: developer 1425 1426 Note: 1427 This is not normally called directly by users 1428 1429 .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1430 @*/ 1431 PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject) 1432 { 1433 PetscBool tflg, opt; 1434 1435 PetscFunctionBegin; 1436 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1437 PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options"); 1438 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1439 PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt)); 1440 if (opt) { 1441 PetscCall(TSSetSaveTrajectory(ts)); 1442 ts->adjoint_solve = tflg; 1443 } 1444 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL)); 1445 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL)); 1446 opt = PETSC_FALSE; 1447 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt)); 1448 if (opt) { 1449 TSMonitorDrawCtx ctx; 1450 PetscInt howoften = 1; 1451 1452 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", 
"TSAdjointMonitorDrawSensi", howoften, &howoften, NULL)); 1453 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx)); 1454 PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void **))TSMonitorDrawCtxDestroy)); 1455 } 1456 PetscFunctionReturn(PETSC_SUCCESS); 1457 } 1458 1459 /*@ 1460 TSAdjointStep - Steps one time step backward in the adjoint run 1461 1462 Collective 1463 1464 Input Parameter: 1465 . ts - the `TS` context obtained from `TSCreate()` 1466 1467 Level: intermediate 1468 1469 .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()` 1470 @*/ 1471 PetscErrorCode TSAdjointStep(TS ts) 1472 { 1473 DM dm; 1474 1475 PetscFunctionBegin; 1476 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1477 PetscCall(TSGetDM(ts, &dm)); 1478 PetscCall(TSAdjointSetUp(ts)); 1479 ts->steps--; /* must decrease the step index before the adjoint step is taken. */ 1480 1481 ts->reason = TS_CONVERGED_ITERATING; 1482 ts->ptime_prev = ts->ptime; 1483 PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0)); 1484 PetscUseTypeMethod(ts, adjointstep); 1485 PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0)); 1486 ts->adjoint_steps++; 1487 1488 if (ts->reason < 0) { 1489 PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]); 1490 } else if (!ts->reason) { 1491 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1492 } 1493 PetscFunctionReturn(PETSC_SUCCESS); 1494 } 1495 1496 /*@ 1497 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1498 1499 Collective 1500 ` 1501 1502 Input Parameter: 1503 . ts - the `TS` context obtained from `TSCreate()` 1504 1505 Options Database Key: 1506 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1507 1508 Level: intermediate 1509 1510 Notes: 1511 This must be called after a call to `TSSolve()` that solves the forward problem 1512 1513 By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time 1514 1515 .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1516 @*/ 1517 PetscErrorCode TSAdjointSolve(TS ts) 1518 { 1519 static PetscBool cite = PETSC_FALSE; 1520 #if defined(TSADJOINT_STAGE) 1521 PetscLogStage adjoint_stage; 1522 #endif 1523 1524 PetscFunctionBegin; 1525 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1526 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1527 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1528 " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n" 1529 " journal = {SIAM Journal on Scientific Computing},\n" 1530 " volume = {44},\n" 1531 " number = {1},\n" 1532 " pages = {C1-C24},\n" 1533 " doi = {10.1137/21M140078X},\n" 1534 " year = {2022}\n}\n", 1535 &cite)); 1536 #if defined(TSADJOINT_STAGE) 1537 PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage)); 1538 PetscCall(PetscLogStagePush(adjoint_stage)); 1539 #endif 1540 PetscCall(TSAdjointSetUp(ts)); 1541 1542 /* reset time step and iteration counters */ 1543 ts->adjoint_steps = 0; 1544 ts->ksp_its = 0; 1545 ts->snes_its = 0; 1546 ts->num_snes_failures = 0; 1547 ts->reject = 0; 1548 ts->reason = TS_CONVERGED_ITERATING; 1549 1550 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1551 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1552 1553 while (!ts->reason) { 1554 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1555 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, 
ts->vecs_sensi, ts->vecs_sensip)); 1556 PetscCall(TSAdjointEventHandler(ts)); 1557 PetscCall(TSAdjointStep(ts)); 1558 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts)); 1559 } 1560 if (!ts->steps) { 1561 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1562 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1563 } 1564 ts->solvetime = ts->ptime; 1565 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view")); 1566 PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution")); 1567 ts->adjoint_max_steps = 0; 1568 #if defined(TSADJOINT_STAGE) 1569 PetscCall(PetscLogStagePop()); 1570 #endif 1571 PetscFunctionReturn(PETSC_SUCCESS); 1572 } 1573 1574 /*@C 1575 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()` 1576 1577 Collective 1578 1579 Input Parameters: 1580 + ts - time stepping context obtained from `TSCreate()` 1581 . step - step number that has just completed 1582 . ptime - model time of the state 1583 . u - state at the current model time 1584 . numcost - number of cost functions (dimension of lambda or mu) 1585 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1586 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1587 1588 Level: developer 1589 1590 Note: 1591 `TSAdjointMonitor()` is typically used automatically within the time stepping implementations. 1592 Users would almost never call this routine directly. 
1593 1594 .seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()` 1595 @*/ 1596 PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu) 1597 { 1598 PetscInt i, n = ts->numberadjointmonitors; 1599 1600 PetscFunctionBegin; 1601 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1602 PetscValidHeaderSpecific(u, VEC_CLASSID, 4); 1603 PetscCall(VecLockReadPush(u)); 1604 for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i])); 1605 PetscCall(VecLockReadPop(u)); 1606 PetscFunctionReturn(PETSC_SUCCESS); 1607 } 1608 1609 /*@ 1610 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1611 1612 Collective 1613 1614 Input Parameter: 1615 . ts - time stepping context 1616 1617 Level: advanced 1618 1619 Notes: 1620 This function cannot be called until `TSAdjointStep()` has been completed. 1621 1622 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()` 1623 @*/ 1624 PetscErrorCode TSAdjointCostIntegral(TS ts) 1625 { 1626 PetscFunctionBegin; 1627 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1628 PetscUseTypeMethod(ts, adjointintegral); 1629 PetscFunctionReturn(PETSC_SUCCESS); 1630 } 1631 1632 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1633 1634 /*@ 1635 TSForwardSetUp - Sets up the internal data structures for the later use 1636 of forward sensitivity analysis 1637 1638 Collective 1639 1640 Input Parameter: 1641 . 
ts - the `TS` context obtained from `TSCreate()` 1642 1643 Level: advanced 1644 1645 .seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()` 1646 @*/ 1647 PetscErrorCode TSForwardSetUp(TS ts) 1648 { 1649 PetscFunctionBegin; 1650 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1651 if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1652 PetscTryTypeMethod(ts, forwardsetup); 1653 PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col)); 1654 ts->forwardsetupcalled = PETSC_TRUE; 1655 PetscFunctionReturn(PETSC_SUCCESS); 1656 } 1657 1658 /*@ 1659 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1660 1661 Collective 1662 1663 Input Parameter: 1664 . ts - the `TS` context obtained from `TSCreate()` 1665 1666 Level: advanced 1667 1668 .seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()` 1669 @*/ 1670 PetscErrorCode TSForwardReset(TS ts) 1671 { 1672 TS quadts = ts->quadraturets; 1673 1674 PetscFunctionBegin; 1675 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1676 PetscTryTypeMethod(ts, forwardreset); 1677 PetscCall(MatDestroy(&ts->mat_sensip)); 1678 if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip)); 1679 PetscCall(VecDestroy(&ts->vec_sensip_col)); 1680 ts->forward_solve = PETSC_FALSE; 1681 ts->forwardsetupcalled = PETSC_FALSE; 1682 PetscFunctionReturn(PETSC_SUCCESS); 1683 } 1684 1685 /*@ 1686 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1687 1688 Input Parameters: 1689 + ts - the `TS` context obtained from `TSCreate()` 1690 . numfwdint - number of integrals 1691 - vp - the vectors containing the gradients for each integral w.r.t. 
parameters 1692 1693 Level: deprecated 1694 1695 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1696 @*/ 1697 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp) 1698 { 1699 PetscFunctionBegin; 1700 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1701 PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1702 if (!ts->numcost) ts->numcost = numfwdint; 1703 1704 ts->vecs_integral_sensip = vp; 1705 PetscFunctionReturn(PETSC_SUCCESS); 1706 } 1707 1708 /*@ 1709 TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term. 1710 1711 Input Parameter: 1712 . ts - the `TS` context obtained from `TSCreate()` 1713 1714 Output Parameters: 1715 + numfwdint - number of integrals 1716 - vp - the vectors containing the gradients for each integral w.r.t. parameters 1717 1718 Level: deprecated 1719 1720 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()` 1721 @*/ 1722 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp) 1723 { 1724 PetscFunctionBegin; 1725 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1726 PetscAssertPointer(vp, 3); 1727 if (numfwdint) *numfwdint = ts->numcost; 1728 if (vp) *vp = ts->vecs_integral_sensip; 1729 PetscFunctionReturn(PETSC_SUCCESS); 1730 } 1731 1732 /*@ 1733 TSForwardStep - Compute the forward sensitivity for one time step. 1734 1735 Collective 1736 1737 Input Parameter: 1738 . ts - time stepping context 1739 1740 Level: advanced 1741 1742 Notes: 1743 This function cannot be called until `TSStep()` has been completed. 
1744 1745 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1746 @*/ 1747 PetscErrorCode TSForwardStep(TS ts) 1748 { 1749 PetscFunctionBegin; 1750 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1751 PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0)); 1752 PetscUseTypeMethod(ts, forwardstep); 1753 PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0)); 1754 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]); 1755 PetscFunctionReturn(PETSC_SUCCESS); 1756 } 1757 1758 /*@ 1759 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1760 1761 Logically Collective 1762 1763 Input Parameters: 1764 + ts - the `TS` context obtained from `TSCreate()` 1765 . nump - number of parameters 1766 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1767 1768 Level: beginner 1769 1770 Notes: 1771 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1772 This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically. 1773 You must call this function before `TSSolve()`. 1774 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 
1775 1776 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1777 @*/ 1778 PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat) 1779 { 1780 PetscFunctionBegin; 1781 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1782 PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3); 1783 ts->forward_solve = PETSC_TRUE; 1784 if (nump == PETSC_DEFAULT) { 1785 PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters)); 1786 } else ts->num_parameters = nump; 1787 PetscCall(PetscObjectReference((PetscObject)Smat)); 1788 PetscCall(MatDestroy(&ts->mat_sensip)); 1789 ts->mat_sensip = Smat; 1790 PetscFunctionReturn(PETSC_SUCCESS); 1791 } 1792 1793 /*@ 1794 TSForwardGetSensitivities - Returns the trajectory sensitivities 1795 1796 Not Collective, but Smat returned is parallel if ts is parallel 1797 1798 Output Parameters: 1799 + ts - the `TS` context obtained from `TSCreate()` 1800 . nump - number of parameters 1801 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1802 1803 Level: intermediate 1804 1805 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1806 @*/ 1807 PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat) 1808 { 1809 PetscFunctionBegin; 1810 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1811 if (nump) *nump = ts->num_parameters; 1812 if (Smat) *Smat = ts->mat_sensip; 1813 PetscFunctionReturn(PETSC_SUCCESS); 1814 } 1815 1816 /*@ 1817 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1818 1819 Collective 1820 1821 Input Parameter: 1822 . ts - time stepping context 1823 1824 Level: advanced 1825 1826 Note: 1827 This function cannot be called until `TSStep()` has been completed. 

.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* dispatch to the implementation-provided forward integral routine */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

  Collective

  Input Parameters:
+ ts - the `TS` context obtained from `TSCreate()`
- didp - parametric sensitivities of the initial condition

  Level: intermediate

  Notes:
  `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
  This function is used to set initial values for tangent linear variables.

.seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(didp, MAT_CLASSID, 2);
  /* only takes effect when no sensitivity matrix has been set yet */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

  Input Parameter:
.
ts - the `TS` context obtained from `TSCreate()` 1870 1871 Output Parameters: 1872 + ns - number of stages 1873 - S - tangent linear sensitivities at the intermediate stages 1874 1875 Level: advanced 1876 1877 .seealso: `TS` 1878 @*/ 1879 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S) 1880 { 1881 PetscFunctionBegin; 1882 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1883 1884 if (!ts->ops->getstages) *S = NULL; 1885 else PetscUseTypeMethod(ts, forwardgetstages, ns, S); 1886 PetscFunctionReturn(PETSC_SUCCESS); 1887 } 1888 1889 /*@ 1890 TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time 1891 1892 Input Parameters: 1893 + ts - the `TS` context obtained from `TSCreate()` 1894 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1895 1896 Output Parameter: 1897 . quadts - the child `TS` context 1898 1899 Level: intermediate 1900 1901 .seealso: [](ch_ts), `TSGetQuadratureTS()` 1902 @*/ 1903 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts) 1904 { 1905 char prefix[128]; 1906 1907 PetscFunctionBegin; 1908 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1909 PetscAssertPointer(quadts, 3); 1910 PetscCall(TSDestroy(&ts->quadraturets)); 1911 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets)); 1912 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1)); 1913 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : "")); 1914 PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix)); 1915 *quadts = ts->quadraturets; 1916 1917 if (ts->numcost) { 1918 PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol)); 1919 } else { 1920 PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol)); 1921 } 1922 ts->costintegralfwd = fwd; 1923 PetscFunctionReturn(PETSC_SUCCESS); 1924 } 1925 1926 /*@ 1927 TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time 1928 1929 Input Parameter: 1930 . ts - the `TS` context obtained from `TSCreate()` 1931 1932 Output Parameters: 1933 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1934 - quadts - the child `TS` context 1935 1936 Level: intermediate 1937 1938 .seealso: [](ch_ts), `TSCreateQuadratureTS()` 1939 @*/ 1940 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts) 1941 { 1942 PetscFunctionBegin; 1943 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1944 if (fwd) *fwd = ts->costintegralfwd; 1945 if (quadts) *quadts = ts->quadraturets; 1946 PetscFunctionReturn(PETSC_SUCCESS); 1947 } 1948 1949 /*@ 1950 TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS` 1951 1952 Collective 1953 1954 Input Parameters: 1955 + ts - the `TS` context obtained from `TSCreate()` 1956 - x - state vector 1957 1958 Output Parameters: 1959 + J - Jacobian matrix 1960 - Jpre - preconditioning matrix for J (may be same as J) 1961 1962 Level: developer 1963 1964 Note: 1965 Uses finite differencing when `TS` Jacobian is not available. 

.seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
@*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  /* evaluate the Jacobian at x (via FD coloring when that path was set up above) */
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}