1 #include <petsc/private/tsimpl.h> /*I <petscts.h> I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep, TS_ForwardStep, TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective 14 15 Input Parameters: 16 + ts - `TS` context obtained from `TSCreate()` 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Level: intermediate 22 23 Note: 24 `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p` 25 26 .seealso: [](ch_ts), `TS`, `TSRHSJacobianP`, `TSGetRHSJacobianP()` 27 @*/ 28 PetscErrorCode TSSetRHSJacobianP(TS ts, Mat Amat, TSRHSJacobianP func, void *ctx) 29 { 30 PetscFunctionBegin; 31 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 32 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 33 34 ts->rhsjacobianp = func; 35 ts->rhsjacobianpctx = ctx; 36 if (Amat) { 37 PetscCall(PetscObjectReference((PetscObject)Amat)); 38 PetscCall(MatDestroy(&ts->Jacprhs)); 39 ts->Jacprhs = Amat; 40 } 41 PetscFunctionReturn(PETSC_SUCCESS); 42 } 43 44 /*@C 45 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 46 47 Logically Collective 48 49 Input Parameter: 50 . ts - `TS` context obtained from `TSCreate()` 51 52 Output Parameters: 53 + Amat - JacobianP matrix 54 . 
func - function 55 - ctx - [optional] user-defined function context 56 57 Level: intermediate 58 59 Note: 60 `Amat` has the same number of rows and the same row parallel layout as `u`, `Amat` has the same number of columns and parallel layout as `p` 61 62 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS`, `TSRHSJacobianP` 63 @*/ 64 PetscErrorCode TSGetRHSJacobianP(TS ts, Mat *Amat, TSRHSJacobianP *func, void **ctx) 65 { 66 PetscFunctionBegin; 67 if (func) *func = ts->rhsjacobianp; 68 if (ctx) *ctx = ts->rhsjacobianpctx; 69 if (Amat) *Amat = ts->Jacprhs; 70 PetscFunctionReturn(PETSC_SUCCESS); 71 } 72 73 /*@C 74 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 75 76 Collective 77 78 Input Parameters: 79 + ts - The `TS` context obtained from `TSCreate()` 80 . t - the time 81 - U - the solution at which to compute the Jacobian 82 83 Output Parameter: 84 . Amat - the computed Jacobian 85 86 Level: developer 87 88 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 89 @*/ 90 PetscErrorCode TSComputeRHSJacobianP(TS ts, PetscReal t, Vec U, Mat Amat) 91 { 92 PetscFunctionBegin; 93 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 94 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 95 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 96 97 if (ts->rhsjacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, ts->rhsjacobianpctx)); 98 else { 99 PetscBool assembled; 100 PetscCall(MatZeroEntries(Amat)); 101 PetscCall(MatAssembled(Amat, &assembled)); 102 if (!assembled) { 103 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 104 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 105 } 106 } 107 PetscFunctionReturn(PETSC_SUCCESS); 108 } 109 110 /*@C 111 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 
112 113 Logically Collective 114 115 Input Parameters: 116 + ts - `TS` context obtained from `TSCreate()` 117 . Amat - JacobianP matrix 118 . func - function 119 - ctx - [optional] user-defined function context 120 121 Calling sequence of `func`: 122 + ts - the `TS` context 123 . t - current timestep 124 . U - input vector (current ODE solution) 125 . Udot - time derivative of state vector 126 . shift - shift to apply, see note below 127 . A - output matrix 128 - ctx - [optional] user-defined function context 129 130 Level: intermediate 131 132 Note: 133 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 134 135 .seealso: [](ch_ts), `TSSetRHSJacobianP()`, `TS` 136 @*/ 137 PetscErrorCode TSSetIJacobianP(TS ts, Mat Amat, PetscErrorCode (*func)(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat A, void *ctx), void *ctx) 138 { 139 PetscFunctionBegin; 140 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 141 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 142 143 ts->ijacobianp = func; 144 ts->ijacobianpctx = ctx; 145 if (Amat) { 146 PetscCall(PetscObjectReference((PetscObject)Amat)); 147 PetscCall(MatDestroy(&ts->Jacp)); 148 ts->Jacp = Amat; 149 } 150 PetscFunctionReturn(PETSC_SUCCESS); 151 } 152 153 /*@C 154 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 155 156 Collective 157 158 Input Parameters: 159 + ts - the `TS` context 160 . t - current timestep 161 . U - state vector 162 . Udot - time derivative of state vector 163 . shift - shift to apply, see note below 164 - imex - flag indicates if the method is IMEX so that the RHSJacobianP should be kept separate 165 166 Output Parameter: 167 . 
Amat - Jacobian matrix 168 169 Level: developer 170 171 .seealso: [](ch_ts), `TS`, `TSSetIJacobianP()` 172 @*/ 173 PetscErrorCode TSComputeIJacobianP(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal shift, Mat Amat, PetscBool imex) 174 { 175 PetscFunctionBegin; 176 if (!Amat) PetscFunctionReturn(PETSC_SUCCESS); 177 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 178 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 179 PetscValidHeaderSpecific(Udot, VEC_CLASSID, 4); 180 181 PetscCall(PetscLogEventBegin(TS_JacobianPEval, ts, U, Amat, 0)); 182 if (ts->ijacobianp) PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->ijacobianp)(ts, t, U, Udot, shift, Amat, ts->ijacobianpctx)); 183 if (imex) { 184 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 185 PetscBool assembled; 186 PetscCall(MatZeroEntries(Amat)); 187 PetscCall(MatAssembled(Amat, &assembled)); 188 if (!assembled) { 189 PetscCall(MatAssemblyBegin(Amat, MAT_FINAL_ASSEMBLY)); 190 PetscCall(MatAssemblyEnd(Amat, MAT_FINAL_ASSEMBLY)); 191 } 192 } 193 } else { 194 if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts, t, U, ts->Jacprhs)); 195 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 196 PetscCall(MatScale(Amat, -1)); 197 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 198 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 199 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 200 PetscCall(MatZeroEntries(Amat)); 201 } 202 PetscCall(MatAXPY(Amat, -1, ts->Jacprhs, axpy)); 203 } 204 } 205 PetscCall(PetscLogEventEnd(TS_JacobianPEval, ts, U, Amat, 0)); 206 PetscFunctionReturn(PETSC_SUCCESS); 207 } 208 209 /*@C 210 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 211 212 Logically Collective 213 214 Input Parameters: 215 + ts - the `TS` context obtained from `TSCreate()` 216 . numcost - number of gradients to be computed, this is the number of cost functions 217 . 
costintegral - vector that stores the integral values 218 . rf - routine for evaluating the integrand function 219 . drduf - function that computes the gradients of the r's with respect to u 220 . drdpf - function that computes the gradients of the r's with respect to p, can be `NULL` if parametric sensitivity is not desired (`mu` = `NULL`) 221 . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 222 - ctx - [optional] user-defined context for private data for the function evaluation routine (may be `NULL`) 223 224 Calling sequence of `rf`: 225 $ PetscErrorCode rf(TS ts, PetscReal t, Vec U, Vec F, oid *ctx) 226 227 Calling sequence of `drduf`: 228 $ PetscErroCode drduf(TS ts, PetscReal t, Vec U, Vec *dRdU, void *ctx) 229 230 Calling sequence of `drdpf`: 231 $ PetscErroCode drdpf(TS ts, PetscReal t, Vec U, Vec *dRdP, void *ctx) 232 233 Level: deprecated 234 235 Note: 236 For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions 237 238 .seealso: [](ch_ts), `TS`, `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()` 239 @*/ 240 PetscErrorCode TSSetCostIntegrand(TS ts, PetscInt numcost, Vec costintegral, PetscErrorCode (*rf)(TS, PetscReal, Vec, Vec, void *), PetscErrorCode (*drduf)(TS, PetscReal, Vec, Vec *, void *), PetscErrorCode (*drdpf)(TS, PetscReal, Vec, Vec *, void *), PetscBool fwd, void *ctx) 241 { 242 PetscFunctionBegin; 243 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 244 if (costintegral) PetscValidHeaderSpecific(costintegral, VEC_CLASSID, 3); 245 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()"); 246 if (!ts->numcost) ts->numcost = numcost; 247 248 if (costintegral) { 249 
PetscCall(PetscObjectReference((PetscObject)costintegral)); 250 PetscCall(VecDestroy(&ts->vec_costintegral)); 251 ts->vec_costintegral = costintegral; 252 } else { 253 if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */ 254 PetscCall(VecCreateSeq(PETSC_COMM_SELF, numcost, &ts->vec_costintegral)); 255 } else { 256 PetscCall(VecSet(ts->vec_costintegral, 0.0)); 257 } 258 } 259 if (!ts->vec_costintegrand) { 260 PetscCall(VecDuplicate(ts->vec_costintegral, &ts->vec_costintegrand)); 261 } else { 262 PetscCall(VecSet(ts->vec_costintegrand, 0.0)); 263 } 264 ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */ 265 ts->costintegrand = rf; 266 ts->costintegrandctx = ctx; 267 ts->drdufunction = drduf; 268 ts->drdpfunction = drdpf; 269 PetscFunctionReturn(PETSC_SUCCESS); 270 } 271 272 /*@C 273 TSGetCostIntegral - Returns the values of the integral term in the cost functions. 274 It is valid to call the routine after a backward run. 275 276 Not Collective 277 278 Input Parameter: 279 . ts - the `TS` context obtained from `TSCreate()` 280 281 Output Parameter: 282 . v - the vector containing the integrals for each cost function 283 284 Level: intermediate 285 286 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, ``TSSetCostIntegrand()` 287 @*/ 288 PetscErrorCode TSGetCostIntegral(TS ts, Vec *v) 289 { 290 TS quadts; 291 292 PetscFunctionBegin; 293 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 294 PetscAssertPointer(v, 2); 295 PetscCall(TSGetQuadratureTS(ts, NULL, &quadts)); 296 *v = quadts->vec_sol; 297 PetscFunctionReturn(PETSC_SUCCESS); 298 } 299 300 /*@C 301 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 302 303 Input Parameters: 304 + ts - the `TS` context 305 . t - current time 306 - U - state vector, i.e. current solution 307 308 Output Parameter: 309 . 
Q - vector of size numcost to hold the outputs 310 311 Level: deprecated 312 313 Note: 314 Most users should not need to explicitly call this routine, as it 315 is used internally within the sensitivity analysis context. 316 317 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostIntegrand()` 318 @*/ 319 PetscErrorCode TSComputeCostIntegrand(TS ts, PetscReal t, Vec U, Vec Q) 320 { 321 PetscFunctionBegin; 322 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 323 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 324 PetscValidHeaderSpecific(Q, VEC_CLASSID, 4); 325 326 PetscCall(PetscLogEventBegin(TS_FunctionEval, ts, U, Q, 0)); 327 if (ts->costintegrand) PetscCallBack("TS callback integrand in the cost function", (*ts->costintegrand)(ts, t, U, Q, ts->costintegrandctx)); 328 else PetscCall(VecZeroEntries(Q)); 329 PetscCall(PetscLogEventEnd(TS_FunctionEval, ts, U, Q, 0)); 330 PetscFunctionReturn(PETSC_SUCCESS); 331 } 332 333 // PetscClangLinter pragma disable: -fdoc-* 334 /*@C 335 TSComputeDRDUFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 336 337 Level: deprecated 338 339 @*/ 340 PetscErrorCode TSComputeDRDUFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 341 { 342 PetscFunctionBegin; 343 if (!DRDU) PetscFunctionReturn(PETSC_SUCCESS); 344 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 345 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 346 347 PetscCallBack("TS callback DRDU for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 348 PetscFunctionReturn(PETSC_SUCCESS); 349 } 350 351 // PetscClangLinter pragma disable: -fdoc-* 352 /*@C 353 TSComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 354 355 Level: deprecated 356 357 @*/ 358 PetscErrorCode TSComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 359 { 360 PetscFunctionBegin; 361 if (!DRDP) PetscFunctionReturn(PETSC_SUCCESS); 362 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 363 PetscValidHeaderSpecific(U, VEC_CLASSID, 
3); 364 365 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 366 PetscFunctionReturn(PETSC_SUCCESS); 367 } 368 369 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation 370 // PetscClangLinter pragma disable: -fdoc-section-header-unknown 371 /*@C 372 TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 373 374 Logically Collective 375 376 Input Parameters: 377 + ts - `TS` context obtained from `TSCreate()` 378 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 379 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 380 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 381 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 382 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 383 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 384 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 385 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP 386 387 Calling sequence of `ihessianproductfunc1`: 388 + ts - the `TS` context 389 + t - current timestep 390 . U - input vector (current ODE solution) 391 . Vl - an array of input vectors to be left-multiplied with the Hessian 392 . Vr - input vector to be right-multiplied with the Hessian 393 . VHV - an array of output vectors for vector-Hessian-vector product 394 - ctx - [optional] user-defined function context 395 396 Level: intermediate 397 398 Notes: 399 All other functions have the same calling sequence as `rhhessianproductfunc1`, so their 400 descriptions are omitted for brevity. 401 402 The first Hessian function and the working array are required. 
403 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 404 $ Vl_n^T*F_UP*Vr 405 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 406 Each entry of F_UP corresponds to the derivative 407 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 408 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being 409 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]} 410 If the cost function is a scalar, there will be only one vector in Vl and VHV. 411 412 .seealso: [](ch_ts), `TS` 413 @*/ 414 PetscErrorCode TSSetIHessianProduct(TS ts, Vec *ihp1, PetscErrorCode (*ihessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *ihp2, PetscErrorCode (*ihessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp3, PetscErrorCode (*ihessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *ihp4, PetscErrorCode (*ihessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx) 415 { 416 PetscFunctionBegin; 417 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 418 PetscAssertPointer(ihp1, 2); 419 420 ts->ihessianproductctx = ctx; 421 if (ihp1) ts->vecs_fuu = ihp1; 422 if (ihp2) ts->vecs_fup = ihp2; 423 if (ihp3) ts->vecs_fpu = ihp3; 424 if (ihp4) ts->vecs_fpp = ihp4; 425 ts->ihessianproduct_fuu = ihessianproductfunc1; 426 ts->ihessianproduct_fup = ihessianproductfunc2; 427 ts->ihessianproduct_fpu = ihessianproductfunc3; 428 ts->ihessianproduct_fpp = ihessianproductfunc4; 429 PetscFunctionReturn(PETSC_SUCCESS); 430 } 431 432 /*@C 433 TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. 434 435 Collective 436 437 Input Parameters: 438 + ts - The `TS` context obtained from `TSCreate()` 439 . t - the time 440 . 
U - the solution at which to compute the Hessian product 441 . Vl - the array of input vectors to be multiplied with the Hessian from the left 442 - Vr - the input vector to be multiplied with the Hessian from the right 443 444 Output Parameter: 445 . VHV - the array of output vectors that store the Hessian product 446 447 Level: developer 448 449 Note: 450 `TSComputeIHessianProductFunctionUU()` is typically used for sensitivity implementation, 451 so most users would not generally call this routine themselves. 452 453 .seealso: [](ch_ts), `TSSetIHessianProduct()` 454 @*/ 455 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 456 { 457 PetscFunctionBegin; 458 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 459 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 460 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 461 462 if (ts->ihessianproduct_fuu) PetscCallBack("TS callback IHessianProduct 1 for sensitivity analysis", (*ts->ihessianproduct_fuu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 463 464 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 465 if (ts->rhshessianproduct_guu) { 466 PetscInt nadj; 467 PetscCall(TSComputeRHSHessianProductFunctionUU(ts, t, U, Vl, Vr, VHV)); 468 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 469 } 470 PetscFunctionReturn(PETSC_SUCCESS); 471 } 472 473 /*@C 474 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 475 476 Collective 477 478 Input Parameters: 479 + ts - The `TS` context obtained from `TSCreate()` 480 . t - the time 481 . U - the solution at which to compute the Hessian product 482 . Vl - the array of input vectors to be multiplied with the Hessian from the left 483 - Vr - the input vector to be multiplied with the Hessian from the right 484 485 Output Parameter: 486 . 
VHV - the array of output vectors that store the Hessian product 487 488 Level: developer 489 490 Note: 491 `TSComputeIHessianProductFunctionUP()` is typically used for sensitivity implementation, 492 so most users would not generally call this routine themselves. 493 494 .seealso: [](ch_ts), `TSSetIHessianProduct()` 495 @*/ 496 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 497 { 498 PetscFunctionBegin; 499 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 500 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 501 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 502 503 if (ts->ihessianproduct_fup) PetscCallBack("TS callback IHessianProduct 2 for sensitivity analysis", (*ts->ihessianproduct_fup)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 504 505 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 506 if (ts->rhshessianproduct_gup) { 507 PetscInt nadj; 508 PetscCall(TSComputeRHSHessianProductFunctionUP(ts, t, U, Vl, Vr, VHV)); 509 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 510 } 511 PetscFunctionReturn(PETSC_SUCCESS); 512 } 513 514 /*@C 515 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 516 517 Collective 518 519 Input Parameters: 520 + ts - The `TS` context obtained from `TSCreate()` 521 . t - the time 522 . U - the solution at which to compute the Hessian product 523 . Vl - the array of input vectors to be multiplied with the Hessian from the left 524 - Vr - the input vector to be multiplied with the Hessian from the right 525 526 Output Parameter: 527 . VHV - the array of output vectors that store the Hessian product 528 529 Level: developer 530 531 Note: 532 `TSComputeIHessianProductFunctionPU()` is typically used for sensitivity implementation, 533 so most users would not generally call this routine themselves. 
534 535 .seealso: [](ch_ts), `TSSetIHessianProduct()` 536 @*/ 537 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 538 { 539 PetscFunctionBegin; 540 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 541 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 542 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 543 544 if (ts->ihessianproduct_fpu) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpu)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 545 546 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 547 if (ts->rhshessianproduct_gpu) { 548 PetscInt nadj; 549 PetscCall(TSComputeRHSHessianProductFunctionPU(ts, t, U, Vl, Vr, VHV)); 550 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 551 } 552 PetscFunctionReturn(PETSC_SUCCESS); 553 } 554 555 /*@C 556 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 557 558 Collective 559 560 Input Parameters: 561 + ts - The `TS` context obtained from `TSCreate()` 562 . t - the time 563 . U - the solution at which to compute the Hessian product 564 . Vl - the array of input vectors to be multiplied with the Hessian from the left 565 - Vr - the input vector to be multiplied with the Hessian from the right 566 567 Output Parameter: 568 . VHV - the array of output vectors that store the Hessian product 569 570 Level: developer 571 572 Note: 573 `TSComputeIHessianProductFunctionPP()` is typically used for sensitivity implementation, 574 so most users would not generally call this routine themselves. 
575 576 .seealso: [](ch_ts), `TSSetIHessianProduct()` 577 @*/ 578 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 579 { 580 PetscFunctionBegin; 581 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 582 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 583 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 584 585 if (ts->ihessianproduct_fpp) PetscCallBack("TS callback IHessianProduct 3 for sensitivity analysis", (*ts->ihessianproduct_fpp)(ts, t, U, Vl, Vr, VHV, ts->ihessianproductctx)); 586 587 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 588 if (ts->rhshessianproduct_gpp) { 589 PetscInt nadj; 590 PetscCall(TSComputeRHSHessianProductFunctionPP(ts, t, U, Vl, Vr, VHV)); 591 for (nadj = 0; nadj < ts->numcost; nadj++) PetscCall(VecScale(VHV[nadj], -1)); 592 } 593 PetscFunctionReturn(PETSC_SUCCESS); 594 } 595 596 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation 597 // PetscClangLinter pragma disable: -fdoc-section-header-unknown 598 /*@C 599 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector 600 product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state 601 variable. 602 603 Logically Collective 604 605 Input Parameters: 606 + ts - `TS` context obtained from `TSCreate()` 607 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 608 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 609 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 610 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 611 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 612 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 613 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 614 . hessianproductfunc4 - vector-Hessian-vector product function for G_PP 615 - ctx - [optional] user-defined function context 616 617 Calling sequence of `rhshessianproductfunc1`: 618 + ts - the `TS` context 619 . t - current timestep 620 . U - input vector (current ODE solution) 621 . Vl - an array of input vectors to be left-multiplied with the Hessian 622 . Vr - input vector to be right-multiplied with the Hessian 623 . VHV - an array of output vectors for vector-Hessian-vector product 624 - ctx - [optional] user-defined function context 625 626 Level: intermediate 627 628 Notes: 629 All other functions have the same calling sequence as `rhhessianproductfunc1`, so their 630 descriptions are omitted for brevity. 631 632 The first Hessian function and the working array are required. 633 634 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 635 $ Vl_n^T*G_UP*Vr 636 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 637 Each entry of G_UP corresponds to the derivative 638 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 639 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 640 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 641 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
642 643 .seealso: `TS`, `TSAdjoint` 644 @*/ 645 PetscErrorCode TSSetRHSHessianProduct(TS ts, Vec *rhshp1, PetscErrorCode (*rhshessianproductfunc1)(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV, void *ctx), Vec *rhshp2, PetscErrorCode (*rhshessianproductfunc2)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp3, PetscErrorCode (*rhshessianproductfunc3)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), Vec *rhshp4, PetscErrorCode (*rhshessianproductfunc4)(TS, PetscReal, Vec, Vec *, Vec, Vec *, void *), void *ctx) 646 { 647 PetscFunctionBegin; 648 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 649 PetscAssertPointer(rhshp1, 2); 650 651 ts->rhshessianproductctx = ctx; 652 if (rhshp1) ts->vecs_guu = rhshp1; 653 if (rhshp2) ts->vecs_gup = rhshp2; 654 if (rhshp3) ts->vecs_gpu = rhshp3; 655 if (rhshp4) ts->vecs_gpp = rhshp4; 656 ts->rhshessianproduct_guu = rhshessianproductfunc1; 657 ts->rhshessianproduct_gup = rhshessianproductfunc2; 658 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 659 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 660 PetscFunctionReturn(PETSC_SUCCESS); 661 } 662 663 /*@C 664 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 665 666 Collective 667 668 Input Parameters: 669 + ts - The `TS` context obtained from `TSCreate()` 670 . t - the time 671 . U - the solution at which to compute the Hessian product 672 . Vl - the array of input vectors to be multiplied with the Hessian from the left 673 - Vr - the input vector to be multiplied with the Hessian from the right 674 675 Output Parameter: 676 . VHV - the array of output vectors that store the Hessian product 677 678 Level: developer 679 680 Note: 681 `TSComputeRHSHessianProductFunctionUU()` is typically used for sensitivity implementation, 682 so most users would not generally call this routine themselves. 
683 684 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` 685 @*/ 686 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 687 { 688 PetscFunctionBegin; 689 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 690 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 691 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 692 693 PetscCallBack("TS callback RHSHessianProduct 1 for sensitivity analysis", (*ts->rhshessianproduct_guu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 694 PetscFunctionReturn(PETSC_SUCCESS); 695 } 696 697 /*@C 698 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 699 700 Collective 701 702 Input Parameters: 703 + ts - The `TS` context obtained from `TSCreate()` 704 . t - the time 705 . U - the solution at which to compute the Hessian product 706 . Vl - the array of input vectors to be multiplied with the Hessian from the left 707 - Vr - the input vector to be multiplied with the Hessian from the right 708 709 Output Parameter: 710 . VHV - the array of output vectors that store the Hessian product 711 712 Level: developer 713 714 Note: 715 `TSComputeRHSHessianProductFunctionUP()` is typically used for sensitivity implementation, 716 so most users would not generally call this routine themselves. 
717 718 .seealso: [](ch_ts), `TS`, `TSSetRHSHessianProduct()` 719 @*/ 720 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 721 { 722 PetscFunctionBegin; 723 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 724 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 725 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 726 727 PetscCallBack("TS callback RHSHessianProduct 2 for sensitivity analysis", (*ts->rhshessianproduct_gup)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 728 PetscFunctionReturn(PETSC_SUCCESS); 729 } 730 731 /*@C 732 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 733 734 Collective 735 736 Input Parameters: 737 + ts - The `TS` context obtained from `TSCreate()` 738 . t - the time 739 . U - the solution at which to compute the Hessian product 740 . Vl - the array of input vectors to be multiplied with the Hessian from the left 741 - Vr - the input vector to be multiplied with the Hessian from the right 742 743 Output Parameter: 744 . VHV - the array of output vectors that store the Hessian product 745 746 Level: developer 747 748 Note: 749 `TSComputeRHSHessianProductFunctionPU()` is typically used for sensitivity implementation, 750 so most users would not generally call this routine themselves. 
751 752 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 753 @*/ 754 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 755 { 756 PetscFunctionBegin; 757 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 758 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 759 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 760 761 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpu)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 762 PetscFunctionReturn(PETSC_SUCCESS); 763 } 764 765 /*@C 766 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 767 768 Collective 769 770 Input Parameters: 771 + ts - The `TS` context obtained from `TSCreate()` 772 . t - the time 773 . U - the solution at which to compute the Hessian product 774 . Vl - the array of input vectors to be multiplied with the Hessian from the left 775 - Vr - the input vector to be multiplied with the Hessian from the right 776 777 Output Parameter: 778 . VHV - the array of output vectors that store the Hessian product 779 780 Level: developer 781 782 Note: 783 `TSComputeRHSHessianProductFunctionPP()` is typically used for sensitivity implementation, 784 so most users would not generally call this routine themselves. 
785 786 .seealso: [](ch_ts), `TSSetRHSHessianProduct()` 787 @*/ 788 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts, PetscReal t, Vec U, Vec *Vl, Vec Vr, Vec *VHV) 789 { 790 PetscFunctionBegin; 791 if (!VHV) PetscFunctionReturn(PETSC_SUCCESS); 792 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 793 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 794 795 PetscCallBack("TS callback RHSHessianProduct 3 for sensitivity analysis", (*ts->rhshessianproduct_gpp)(ts, t, U, Vl, Vr, VHV, ts->rhshessianproductctx)); 796 PetscFunctionReturn(PETSC_SUCCESS); 797 } 798 799 /* --------------------------- Adjoint sensitivity ---------------------------*/ 800 801 /*@ 802 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 803 for use by the `TS` adjoint routines. 804 805 Logically Collective 806 807 Input Parameters: 808 + ts - the `TS` context obtained from `TSCreate()` 809 . numcost - number of gradients to be computed, this is the number of cost functions 810 . 
lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 811 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 812 813 Level: beginner 814 815 Notes: 816 the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime mu_i = df/dp|finaltime 817 818 After `TSAdjointSolve()` is called the lambda and the mu contain the computed sensitivities 819 820 .seealso: `TS`, `TSAdjointSolve()`, `TSGetCostGradients()` 821 @*/ 822 PetscErrorCode TSSetCostGradients(TS ts, PetscInt numcost, Vec *lambda, Vec *mu) 823 { 824 PetscFunctionBegin; 825 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 826 PetscAssertPointer(lambda, 3); 827 ts->vecs_sensi = lambda; 828 ts->vecs_sensip = mu; 829 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 830 ts->numcost = numcost; 831 PetscFunctionReturn(PETSC_SUCCESS); 832 } 833 834 /*@ 835 TSGetCostGradients - Returns the gradients from the `TSAdjointSolve()` 836 837 Not Collective, but the vectors returned are parallel if `TS` is parallel 838 839 Input Parameter: 840 . ts - the `TS` context obtained from `TSCreate()` 841 842 Output Parameters: 843 + numcost - size of returned arrays 844 . 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 845 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 846 847 Level: intermediate 848 849 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSSetCostGradients()` 850 @*/ 851 PetscErrorCode TSGetCostGradients(TS ts, PetscInt *numcost, Vec **lambda, Vec **mu) 852 { 853 PetscFunctionBegin; 854 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 855 if (numcost) *numcost = ts->numcost; 856 if (lambda) *lambda = ts->vecs_sensi; 857 if (mu) *mu = ts->vecs_sensip; 858 PetscFunctionReturn(PETSC_SUCCESS); 859 } 860 861 /*@ 862 TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters 863 for use by the `TS` adjoint routines. 864 865 Logically Collective 866 867 Input Parameters: 868 + ts - the `TS` context obtained from `TSCreate()` 869 . numcost - number of cost functions 870 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 871 . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 872 - dir - the direction vector that are multiplied with the Hessian of the cost functions 873 874 Level: beginner 875 876 Notes: 877 Hessian of the cost function is completely different from Hessian of the ODE/DAE system 878 879 For second-order adjoint, one needs to call this function and then `TSAdjointSetForward()` before `TSSolve()`. 880 881 After `TSAdjointSolve()` is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver. 
882 883 Passing `NULL` for `lambda2` disables the second-order calculation. 884 885 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointSetForward()` 886 @*/ 887 PetscErrorCode TSSetCostHessianProducts(TS ts, PetscInt numcost, Vec *lambda2, Vec *mu2, Vec dir) 888 { 889 PetscFunctionBegin; 890 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 891 PetscCheck(!ts->numcost || ts->numcost == numcost, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 892 ts->numcost = numcost; 893 ts->vecs_sensi2 = lambda2; 894 ts->vecs_sensi2p = mu2; 895 ts->vec_dir = dir; 896 PetscFunctionReturn(PETSC_SUCCESS); 897 } 898 899 /*@ 900 TSGetCostHessianProducts - Returns the gradients from the `TSAdjointSolve()` 901 902 Not Collective, but vectors returned are parallel if `TS` is parallel 903 904 Input Parameter: 905 . ts - the `TS` context obtained from `TSCreate()` 906 907 Output Parameters: 908 + numcost - number of cost functions 909 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 910 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 911 - dir - the direction vector that are multiplied with the Hessian of the cost functions 912 913 Level: intermediate 914 915 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()` 916 @*/ 917 PetscErrorCode TSGetCostHessianProducts(TS ts, PetscInt *numcost, Vec **lambda2, Vec **mu2, Vec *dir) 918 { 919 PetscFunctionBegin; 920 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 921 if (numcost) *numcost = ts->numcost; 922 if (lambda2) *lambda2 = ts->vecs_sensi2; 923 if (mu2) *mu2 = ts->vecs_sensi2p; 924 if (dir) *dir = ts->vec_dir; 925 PetscFunctionReturn(PETSC_SUCCESS); 926 } 927 928 /*@ 929 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 930 931 Logically Collective 932 933 Input Parameters: 934 + ts - the `TS` context obtained from `TSCreate()` 935 - didp - the derivative of initial values w.r.t. parameters 936 937 Level: intermediate 938 939 Notes: 940 When computing sensitivities w.r.t. initial condition, set didp to `NULL` so that the solver will take it as an identity matrix mathematically. 941 `TSAdjoint` does not reset the tangent linear solver automatically, `TSAdjointResetForward()` should be called to reset the tangent linear solver. 942 943 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 944 @*/ 945 PetscErrorCode TSAdjointSetForward(TS ts, Mat didp) 946 { 947 Mat A; 948 Vec sp; 949 PetscScalar *xarr; 950 PetscInt lsize; 951 952 PetscFunctionBegin; 953 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 954 PetscCheck(ts->vecs_sensi2, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Must call TSSetCostHessianProducts() first"); 955 PetscCheck(ts->vec_dir, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 956 /* create a single-column dense matrix */ 957 PetscCall(VecGetLocalSize(ts->vec_sol, &lsize)); 958 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts), lsize, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, &A)); 959 960 PetscCall(VecDuplicate(ts->vec_sol, &sp)); 961 PetscCall(MatDenseGetColumn(A, 0, &xarr)); 962 PetscCall(VecPlaceArray(sp, xarr)); 963 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 964 if (didp) { 965 PetscCall(MatMult(didp, ts->vec_dir, sp)); 966 PetscCall(VecScale(sp, 2.)); 967 } else { 968 PetscCall(VecZeroEntries(sp)); 969 } 970 } else { /* tangent linear variable initialized as dir */ 971 PetscCall(VecCopy(ts->vec_dir, sp)); 972 } 973 PetscCall(VecResetArray(sp)); 974 PetscCall(MatDenseRestoreColumn(A, &xarr)); 975 PetscCall(VecDestroy(&sp)); 976 977 PetscCall(TSForwardSetInitialSensitivities(ts, A)); /* if didp is NULL, identity matrix is assumed */ 978 979 PetscCall(MatDestroy(&A)); 980 PetscFunctionReturn(PETSC_SUCCESS); 981 } 982 983 /*@ 984 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 985 986 Logically Collective 987 988 Input Parameter: 989 . ts - the `TS` context obtained from `TSCreate()` 990 991 Level: intermediate 992 993 .seealso: [](ch_ts), `TSAdjointSetForward()` 994 @*/ 995 PetscErrorCode TSAdjointResetForward(TS ts) 996 { 997 PetscFunctionBegin; 998 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 999 PetscCall(TSForwardReset(ts)); 1000 PetscFunctionReturn(PETSC_SUCCESS); 1001 } 1002 1003 /*@ 1004 TSAdjointSetUp - Sets up the internal data structures for the later use 1005 of an adjoint solver 1006 1007 Collective 1008 1009 Input Parameter: 1010 . 
ts - the `TS` context obtained from `TSCreate()` 1011 1012 Level: advanced 1013 1014 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 1015 @*/ 1016 PetscErrorCode TSAdjointSetUp(TS ts) 1017 { 1018 TSTrajectory tj; 1019 PetscBool match; 1020 1021 PetscFunctionBegin; 1022 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1023 if (ts->adjointsetupcalled) PetscFunctionReturn(PETSC_SUCCESS); 1024 PetscCheck(ts->vecs_sensi, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetCostGradients() first"); 1025 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_WRONGSTATE, "Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 1026 PetscCall(TSGetTrajectory(ts, &tj)); 1027 PetscCall(PetscObjectTypeCompare((PetscObject)tj, TSTRAJECTORYBASIC, &match)); 1028 if (match) { 1029 PetscBool solution_only; 1030 PetscCall(TSTrajectoryGetSolutionOnly(tj, &solution_only)); 1031 PetscCheck(!solution_only, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 1032 } 1033 PetscCall(TSTrajectorySetUseHistory(tj, PETSC_FALSE)); /* not use TSHistory */ 1034 1035 if (ts->quadraturets) { /* if there is integral in the cost function */ 1036 PetscCall(VecDuplicate(ts->vecs_sensi[0], &ts->vec_drdu_col)); 1037 if (ts->vecs_sensip) PetscCall(VecDuplicate(ts->vecs_sensip[0], &ts->vec_drdp_col)); 1038 } 1039 1040 PetscTryTypeMethod(ts, adjointsetup); 1041 ts->adjointsetupcalled = PETSC_TRUE; 1042 PetscFunctionReturn(PETSC_SUCCESS); 1043 } 1044 1045 /*@ 1046 TSAdjointReset - Resets a `TS` adjoint context and removes any allocated `Vec`s and `Mat`s. 1047 1048 Collective 1049 1050 Input Parameter: 1051 . 
ts - the `TS` context obtained from `TSCreate()` 1052 1053 Level: beginner 1054 1055 .seealso: [](ch_ts), `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 1056 @*/ 1057 PetscErrorCode TSAdjointReset(TS ts) 1058 { 1059 PetscFunctionBegin; 1060 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1061 PetscTryTypeMethod(ts, adjointreset); 1062 if (ts->quadraturets) { /* if there is integral in the cost function */ 1063 PetscCall(VecDestroy(&ts->vec_drdu_col)); 1064 if (ts->vecs_sensip) PetscCall(VecDestroy(&ts->vec_drdp_col)); 1065 } 1066 ts->vecs_sensi = NULL; 1067 ts->vecs_sensip = NULL; 1068 ts->vecs_sensi2 = NULL; 1069 ts->vecs_sensi2p = NULL; 1070 ts->vec_dir = NULL; 1071 ts->adjointsetupcalled = PETSC_FALSE; 1072 PetscFunctionReturn(PETSC_SUCCESS); 1073 } 1074 1075 /*@ 1076 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1077 1078 Logically Collective 1079 1080 Input Parameters: 1081 + ts - the `TS` context obtained from `TSCreate()` 1082 - steps - number of steps to use 1083 1084 Level: intermediate 1085 1086 Notes: 1087 Normally one does not call this and `TSAdjointSolve()` integrates back to the original timestep. 
One can call this 1088 so as to integrate back to less than the original timestep 1089 1090 .seealso: [](ch_ts), `TSAdjointSolve()`, `TS`, `TSSetExactFinalTime()` 1091 @*/ 1092 PetscErrorCode TSAdjointSetSteps(TS ts, PetscInt steps) 1093 { 1094 PetscFunctionBegin; 1095 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1096 PetscValidLogicalCollectiveInt(ts, steps, 2); 1097 PetscCheck(steps >= 0, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back a negative number of steps"); 1098 PetscCheck(steps <= ts->steps, PetscObjectComm((PetscObject)ts), PETSC_ERR_ARG_OUTOFRANGE, "Cannot step back more than the total number of forward steps"); 1099 ts->adjoint_max_steps = steps; 1100 PetscFunctionReturn(PETSC_SUCCESS); 1101 } 1102 1103 // PetscClangLinter pragma disable: -fdoc-* 1104 /*@C 1105 TSAdjointSetRHSJacobian - Deprecated, use `TSSetRHSJacobianP()` 1106 1107 Level: deprecated 1108 @*/ 1109 PetscErrorCode TSAdjointSetRHSJacobian(TS ts, Mat Amat, PetscErrorCode (*func)(TS, PetscReal, Vec, Mat, void *), void *ctx) 1110 { 1111 PetscFunctionBegin; 1112 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1113 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1114 1115 ts->rhsjacobianp = func; 1116 ts->rhsjacobianpctx = ctx; 1117 if (Amat) { 1118 PetscCall(PetscObjectReference((PetscObject)Amat)); 1119 PetscCall(MatDestroy(&ts->Jacp)); 1120 ts->Jacp = Amat; 1121 } 1122 PetscFunctionReturn(PETSC_SUCCESS); 1123 } 1124 1125 // PetscClangLinter pragma disable: -fdoc-* 1126 /*@C 1127 TSAdjointComputeRHSJacobian - Deprecated, use `TSComputeRHSJacobianP()` 1128 1129 Level: deprecated 1130 @*/ 1131 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts, PetscReal t, Vec U, Mat Amat) 1132 { 1133 PetscFunctionBegin; 1134 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1135 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1136 PetscValidHeaderSpecific(Amat, MAT_CLASSID, 4); 1137 1138 PetscCallBack("TS callback JacobianP for sensitivity analysis", (*ts->rhsjacobianp)(ts, t, U, Amat, 
ts->rhsjacobianpctx)); 1139 PetscFunctionReturn(PETSC_SUCCESS); 1140 } 1141 1142 // PetscClangLinter pragma disable: -fdoc-* 1143 /*@ 1144 TSAdjointComputeDRDYFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobian()` 1145 1146 Level: deprecated 1147 @*/ 1148 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts, PetscReal t, Vec U, Vec *DRDU) 1149 { 1150 PetscFunctionBegin; 1151 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1152 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1153 1154 PetscCallBack("TS callback DRDY for sensitivity analysis", (*ts->drdufunction)(ts, t, U, DRDU, ts->costintegrandctx)); 1155 PetscFunctionReturn(PETSC_SUCCESS); 1156 } 1157 1158 // PetscClangLinter pragma disable: -fdoc-* 1159 /*@ 1160 TSAdjointComputeDRDPFunction - Deprecated, use `TSGetQuadratureTS()` then `TSComputeRHSJacobianP()` 1161 1162 Level: deprecated 1163 @*/ 1164 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts, PetscReal t, Vec U, Vec *DRDP) 1165 { 1166 PetscFunctionBegin; 1167 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1168 PetscValidHeaderSpecific(U, VEC_CLASSID, 3); 1169 1170 PetscCallBack("TS callback DRDP for sensitivity analysis", (*ts->drdpfunction)(ts, t, U, DRDP, ts->costintegrandctx)); 1171 PetscFunctionReturn(PETSC_SUCCESS); 1172 } 1173 1174 // PetscClangLinter pragma disable: -fdoc-param-list-func-parameter-documentation 1175 /*@C 1176 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1177 1178 Level: intermediate 1179 1180 .seealso: [](ch_ts), `TSAdjointMonitorSet()` 1181 @*/ 1182 static PetscErrorCode TSAdjointMonitorSensi(TS ts, PetscInt step, PetscReal ptime, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1183 { 1184 PetscViewer viewer = vf->viewer; 1185 1186 PetscFunctionBegin; 1187 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1188 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1189 PetscCall(VecView(lambda[0], viewer)); 1190 PetscCall(PetscViewerPopFormat(viewer)); 1191 
PetscFunctionReturn(PETSC_SUCCESS); 1192 } 1193 1194 /*@C 1195 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1196 1197 Collective 1198 1199 Input Parameters: 1200 + ts - `TS` object you wish to monitor 1201 . name - the monitor type one is seeking 1202 . help - message indicating what monitoring is done 1203 . manual - manual page for the monitor 1204 . monitor - the monitor function 1205 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the `TS` or `PetscViewer` objects 1206 1207 Level: developer 1208 1209 .seealso: [](ch_ts), `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`, 1210 `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()` 1211 `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, 1212 `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`, 1213 `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`, 1214 `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`, 1215 `PetscOptionsFList()`, `PetscOptionsEList()` 1216 @*/ 1217 PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts, const char name[], const char help[], const char manual[], PetscErrorCode (*monitor)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, PetscViewerAndFormat *), PetscErrorCode (*monitorsetup)(TS, PetscViewerAndFormat *)) 1218 { 1219 PetscViewer viewer; 1220 PetscViewerFormat format; 1221 PetscBool flg; 1222 1223 PetscFunctionBegin; 1224 PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts), ((PetscObject)ts)->options, ((PetscObject)ts)->prefix, name, &viewer, &format, &flg)); 1225 if (flg) { 1226 PetscViewerAndFormat *vf; 1227 PetscCall(PetscViewerAndFormatCreate(viewer, format, &vf)); 1228 PetscCall(PetscObjectDereference((PetscObject)viewer)); 
1229 if (monitorsetup) PetscCall((*monitorsetup)(ts, vf)); 1230 PetscCall(TSAdjointMonitorSet(ts, (PetscErrorCode(*)(TS, PetscInt, PetscReal, Vec, PetscInt, Vec *, Vec *, void *))monitor, vf, (PetscErrorCode(*)(void **))PetscViewerAndFormatDestroy)); 1231 } 1232 PetscFunctionReturn(PETSC_SUCCESS); 1233 } 1234 1235 /*@C 1236 TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every 1237 timestep to display the iteration's progress. 1238 1239 Logically Collective 1240 1241 Input Parameters: 1242 + ts - the `TS` context obtained from `TSCreate()` 1243 . adjointmonitor - monitoring routine 1244 . adjointmctx - [optional] user-defined context for private data for the monitor routine 1245 (use `NULL` if no context is desired) 1246 - adjointmdestroy - [optional] routine that frees monitor context (may be `NULL`) 1247 1248 Calling sequence of `adjointmonitor`: 1249 + ts - the `TS` context 1250 . steps - iteration number (after the final time step the monitor routine is called with 1251 a step of -1, this is at the final time which may have been interpolated to) 1252 . time - current time 1253 . u - current iterate 1254 . numcost - number of cost functionos 1255 . lambda - sensitivities to initial conditions 1256 . mu - sensitivities to parameters 1257 - adjointmctx - [optional] adjoint monitoring context 1258 1259 Calling sequence of `adjointmdestroy`: 1260 . mctx - the monitor context to destroy 1261 1262 Level: intermediate 1263 1264 Note: 1265 This routine adds an additional monitor to the list of monitors that 1266 already has been loaded. 
1267 1268 Fortran Notes: 1269 Only a single monitor function can be set for each `TS` object 1270 1271 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorCancel()` 1272 @*/ 1273 PetscErrorCode TSAdjointMonitorSet(TS ts, PetscErrorCode (*adjointmonitor)(TS ts, PetscInt steps, PetscReal time, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *adjointmctx), void *adjointmctx, PetscErrorCode (*adjointmdestroy)(void **mctx)) 1274 { 1275 PetscInt i; 1276 PetscBool identical; 1277 1278 PetscFunctionBegin; 1279 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1280 for (i = 0; i < ts->numbermonitors; i++) { 1281 PetscCall(PetscMonitorCompare((PetscErrorCode(*)(void))adjointmonitor, adjointmctx, adjointmdestroy, (PetscErrorCode(*)(void))ts->adjointmonitor[i], ts->adjointmonitorcontext[i], ts->adjointmonitordestroy[i], &identical)); 1282 if (identical) PetscFunctionReturn(PETSC_SUCCESS); 1283 } 1284 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Too many adjoint monitors set"); 1285 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1286 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1287 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void *)adjointmctx; 1288 PetscFunctionReturn(PETSC_SUCCESS); 1289 } 1290 1291 /*@C 1292 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1293 1294 Logically Collective 1295 1296 Input Parameter: 1297 . ts - the `TS` context obtained from `TSCreate()` 1298 1299 Notes: 1300 There is no way to remove a single, specific monitor. 
1301 1302 Level: intermediate 1303 1304 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1305 @*/ 1306 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1307 { 1308 PetscInt i; 1309 1310 PetscFunctionBegin; 1311 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1312 for (i = 0; i < ts->numberadjointmonitors; i++) { 1313 if (ts->adjointmonitordestroy[i]) PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); 1314 } 1315 ts->numberadjointmonitors = 0; 1316 PetscFunctionReturn(PETSC_SUCCESS); 1317 } 1318 1319 /*@C 1320 TSAdjointMonitorDefault - the default monitor of adjoint computations 1321 1322 Input Parameters: 1323 + ts - the `TS` context 1324 . step - iteration number (after the final time step the monitor routine is called with a 1325 step of -1, this is at the final time which may have been interpolated to) 1326 . time - current time 1327 . v - current iterate 1328 . numcost - number of cost functionos 1329 . lambda - sensitivities to initial conditions 1330 . mu - sensitivities to parameters 1331 - vf - the viewer and format 1332 1333 Level: intermediate 1334 1335 .seealso: [](ch_ts), `TS`, `TSAdjointSolve()`, `TSAdjointMonitorSet()` 1336 @*/ 1337 PetscErrorCode TSAdjointMonitorDefault(TS ts, PetscInt step, PetscReal time, Vec v, PetscInt numcost, Vec *lambda, Vec *mu, PetscViewerAndFormat *vf) 1338 { 1339 PetscViewer viewer = vf->viewer; 1340 1341 PetscFunctionBegin; 1342 (void)v; 1343 (void)numcost; 1344 (void)lambda; 1345 (void)mu; 1346 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 8); 1347 PetscCall(PetscViewerPushFormat(viewer, vf->format)); 1348 PetscCall(PetscViewerASCIIAddTab(viewer, ((PetscObject)ts)->tablevel)); 1349 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " TS dt %g time %g%s", step, (double)ts->time_step, (double)time, ts->steprollback ? 
" (r)\n" : "\n")); 1350 PetscCall(PetscViewerASCIISubtractTab(viewer, ((PetscObject)ts)->tablevel)); 1351 PetscCall(PetscViewerPopFormat(viewer)); 1352 PetscFunctionReturn(PETSC_SUCCESS); 1353 } 1354 1355 /*@C 1356 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint `TS` solvers by calling 1357 `VecView()` for the sensitivities to initial states at each timestep 1358 1359 Collective 1360 1361 Input Parameters: 1362 + ts - the `TS` context 1363 . step - current time-step 1364 . ptime - current time 1365 . u - current state 1366 . numcost - number of cost functions 1367 . lambda - sensitivities to initial conditions 1368 . mu - sensitivities to parameters 1369 - dummy - either a viewer or `NULL` 1370 1371 Level: intermediate 1372 1373 .seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1374 @*/ 1375 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu, void *dummy) 1376 { 1377 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1378 PetscDraw draw; 1379 PetscReal xl, yl, xr, yr, h; 1380 char time[32]; 1381 1382 PetscFunctionBegin; 1383 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(PETSC_SUCCESS); 1384 1385 PetscCall(VecView(lambda[0], ictx->viewer)); 1386 PetscCall(PetscViewerDrawGetDraw(ictx->viewer, 0, &draw)); 1387 PetscCall(PetscSNPrintf(time, 32, "Timestep %d Time %g", (int)step, (double)ptime)); 1388 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 1389 h = yl + .95 * (yr - yl); 1390 PetscCall(PetscDrawStringCentered(draw, .5 * (xl + xr), h, PETSC_DRAW_BLACK, time)); 1391 PetscCall(PetscDrawFlush(draw)); 1392 PetscFunctionReturn(PETSC_SUCCESS); 1393 } 1394 1395 /*@C 1396 TSAdjointSetFromOptions - Sets various `TS` adjoint parameters from options database. 
1397 1398 Collective 1399 1400 Input Parameters: 1401 + ts - the `TS` context 1402 - PetscOptionsObject - the options context 1403 1404 Options Database Keys: 1405 + -ts_adjoint_solve <yes,no> - After solving the ODE/DAE solve the adjoint problem (requires `-ts_save_trajectory`) 1406 . -ts_adjoint_monitor - print information at each adjoint time step 1407 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1408 1409 Level: developer 1410 1411 Note: 1412 This is not normally called directly by users 1413 1414 .seealso: [](ch_ts), `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1415 @*/ 1416 PetscErrorCode TSAdjointSetFromOptions(TS ts, PetscOptionItems *PetscOptionsObject) 1417 { 1418 PetscBool tflg, opt; 1419 1420 PetscFunctionBegin; 1421 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1422 PetscOptionsHeadBegin(PetscOptionsObject, "TS Adjoint options"); 1423 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1424 PetscCall(PetscOptionsBool("-ts_adjoint_solve", "Solve the adjoint problem immediately after solving the forward problem", "", tflg, &tflg, &opt)); 1425 if (opt) { 1426 PetscCall(TSSetSaveTrajectory(ts)); 1427 ts->adjoint_solve = tflg; 1428 } 1429 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor", "Monitor adjoint timestep size", "TSAdjointMonitorDefault", TSAdjointMonitorDefault, NULL)); 1430 PetscCall(TSAdjointMonitorSetFromOptions(ts, "-ts_adjoint_monitor_sensi", "Monitor sensitivity in the adjoint computation", "TSAdjointMonitorSensi", TSAdjointMonitorSensi, NULL)); 1431 opt = PETSC_FALSE; 1432 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", "TSAdjointMonitorDrawSensi", &opt)); 1433 if (opt) { 1434 TSMonitorDrawCtx ctx; 1435 PetscInt howoften = 1; 1436 1437 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi", "Monitor adjoint sensitivities (lambda only) graphically", 
"TSAdjointMonitorDrawSensi", howoften, &howoften, NULL)); 1438 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts), NULL, NULL, PETSC_DECIDE, PETSC_DECIDE, 300, 300, howoften, &ctx)); 1439 PetscCall(TSAdjointMonitorSet(ts, TSAdjointMonitorDrawSensi, ctx, (PetscErrorCode(*)(void **))TSMonitorDrawCtxDestroy)); 1440 } 1441 PetscFunctionReturn(PETSC_SUCCESS); 1442 } 1443 1444 /*@ 1445 TSAdjointStep - Steps one time step backward in the adjoint run 1446 1447 Collective 1448 1449 Input Parameter: 1450 . ts - the `TS` context obtained from `TSCreate()` 1451 1452 Level: intermediate 1453 1454 .seealso: [](ch_ts), `TSAdjointSetUp()`, `TSAdjointSolve()` 1455 @*/ 1456 PetscErrorCode TSAdjointStep(TS ts) 1457 { 1458 DM dm; 1459 1460 PetscFunctionBegin; 1461 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1462 PetscCall(TSGetDM(ts, &dm)); 1463 PetscCall(TSAdjointSetUp(ts)); 1464 ts->steps--; /* must decrease the step index before the adjoint step is taken. */ 1465 1466 ts->reason = TS_CONVERGED_ITERATING; 1467 ts->ptime_prev = ts->ptime; 1468 PetscCall(PetscLogEventBegin(TS_AdjointStep, ts, 0, 0, 0)); 1469 PetscUseTypeMethod(ts, adjointstep); 1470 PetscCall(PetscLogEventEnd(TS_AdjointStep, ts, 0, 0, 0)); 1471 ts->adjoint_steps++; 1472 1473 if (ts->reason < 0) { 1474 PetscCheck(!ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSAdjointStep has failed due to %s", TSConvergedReasons[ts->reason]); 1475 } else if (!ts->reason) { 1476 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1477 } 1478 PetscFunctionReturn(PETSC_SUCCESS); 1479 } 1480 1481 /*@ 1482 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1483 1484 Collective 1485 ` 1486 1487 Input Parameter: 1488 . ts - the `TS` context obtained from `TSCreate()` 1489 1490 Options Database Key: 1491 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1492 1493 Level: intermediate 1494 1495 Notes: 1496 This must be called after a call to `TSSolve()` that solves the forward problem 1497 1498 By default this will integrate back to the initial time, one can use `TSAdjointSetSteps()` to step back to a later time 1499 1500 .seealso: [](ch_ts), `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1501 @*/ 1502 PetscErrorCode TSAdjointSolve(TS ts) 1503 { 1504 static PetscBool cite = PETSC_FALSE; 1505 #if defined(TSADJOINT_STAGE) 1506 PetscLogStage adjoint_stage; 1507 #endif 1508 1509 PetscFunctionBegin; 1510 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1511 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1512 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1513 " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n" 1514 " journal = {SIAM Journal on Scientific Computing},\n" 1515 " volume = {44},\n" 1516 " number = {1},\n" 1517 " pages = {C1-C24},\n" 1518 " doi = {10.1137/21M140078X},\n" 1519 " year = {2022}\n}\n", 1520 &cite)); 1521 #if defined(TSADJOINT_STAGE) 1522 PetscCall(PetscLogStageRegister("TSAdjoint", &adjoint_stage)); 1523 PetscCall(PetscLogStagePush(adjoint_stage)); 1524 #endif 1525 PetscCall(TSAdjointSetUp(ts)); 1526 1527 /* reset time step and iteration counters */ 1528 ts->adjoint_steps = 0; 1529 ts->ksp_its = 0; 1530 ts->snes_its = 0; 1531 ts->num_snes_failures = 0; 1532 ts->reject = 0; 1533 ts->reason = TS_CONVERGED_ITERATING; 1534 1535 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1536 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1537 1538 while (!ts->reason) { 1539 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1540 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, 
ts->vecs_sensi, ts->vecs_sensip)); 1541 PetscCall(TSAdjointEventHandler(ts)); 1542 PetscCall(TSAdjointStep(ts)); 1543 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) PetscCall(TSAdjointCostIntegral(ts)); 1544 } 1545 if (!ts->steps) { 1546 PetscCall(TSTrajectoryGet(ts->trajectory, ts, ts->steps, &ts->ptime)); 1547 PetscCall(TSAdjointMonitor(ts, ts->steps, ts->ptime, ts->vec_sol, ts->numcost, ts->vecs_sensi, ts->vecs_sensip)); 1548 } 1549 ts->solvetime = ts->ptime; 1550 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory, NULL, "-ts_trajectory_view")); 1551 PetscCall(VecViewFromOptions(ts->vecs_sensi[0], (PetscObject)ts, "-ts_adjoint_view_solution")); 1552 ts->adjoint_max_steps = 0; 1553 #if defined(TSADJOINT_STAGE) 1554 PetscCall(PetscLogStagePop()); 1555 #endif 1556 PetscFunctionReturn(PETSC_SUCCESS); 1557 } 1558 1559 /*@C 1560 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using `TSAdjointMonitorSet()` 1561 1562 Collective 1563 1564 Input Parameters: 1565 + ts - time stepping context obtained from `TSCreate()` 1566 . step - step number that has just completed 1567 . ptime - model time of the state 1568 . u - state at the current model time 1569 . numcost - number of cost functions (dimension of lambda or mu) 1570 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1571 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1572 1573 Level: developer 1574 1575 Note: 1576 `TSAdjointMonitor()` is typically used automatically within the time stepping implementations. 1577 Users would almost never call this routine directly. 
.seealso: `TSAdjointMonitorSet()`, `TSAdjointSolve()`
@*/
PetscErrorCode TSAdjointMonitor(TS ts, PetscInt step, PetscReal ptime, Vec u, PetscInt numcost, Vec *lambda, Vec *mu)
{
  PetscInt i, n = ts->numberadjointmonitors;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(u, VEC_CLASSID, 4);
  /* the state is read-only for monitors; lock it so a misbehaving monitor cannot modify it */
  PetscCall(VecLockReadPush(u));
  for (i = 0; i < n; i++) PetscCall((*ts->adjointmonitor[i])(ts, step, ptime, u, numcost, lambda, mu, ts->adjointmonitorcontext[i]));
  PetscCall(VecLockReadPop(u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Notes:
  This function cannot be called until `TSAdjointStep()` has been completed.

.seealso: [](ch_ts), `TSAdjointSolve()`, `TSAdjointStep()`
@*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* errors if the TS implementation provides no adjointintegral method */
  PetscUseTypeMethod(ts, adjointintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------ Forward (tangent linear) sensitivity ------------------*/

/*@
  TSForwardSetUp - Sets up the internal data structures for the later use
  of forward sensitivity analysis

  Collective

  Input Parameter:
.
ts - the `TS` context obtained from `TSCreate()`

  Level: advanced

.seealso: [](ch_ts), `TS`, `TSCreate()`, `TSDestroy()`, `TSSetUp()`
@*/
PetscErrorCode TSForwardSetUp(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* setup is idempotent; repeated calls are no-ops until TSForwardReset() */
  if (ts->forwardsetupcalled) PetscFunctionReturn(PETSC_SUCCESS);
  PetscTryTypeMethod(ts, forwardsetup);
  /* work vector sized like the solution, used to hold one column of the sensitivity matrix */
  PetscCall(VecDuplicate(ts->vec_sol, &ts->vec_sensip_col));
  ts->forwardsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

  Collective

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Level: advanced

.seealso: [](ch_ts), `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
@*/
PetscErrorCode TSForwardReset(TS ts)
{
  TS quadts = ts->quadraturets;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscTryTypeMethod(ts, forwardreset);
  PetscCall(MatDestroy(&ts->mat_sensip));
  /* the quadrature sub-TS keeps its own sensitivity matrix; release it too */
  if (quadts) PetscCall(MatDestroy(&quadts->mat_sensip));
  PetscCall(VecDestroy(&ts->vec_sensip_col));
  ts->forward_solve      = PETSC_FALSE;
  ts->forwardsetupcalled = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

  Input Parameters:
+ ts        - the `TS` context obtained from `TSCreate()`
. numfwdint - number of integrals
- vp        - the vectors containing the gradients for each integral w.r.t.
parameters 1677 1678 Level: deprecated 1679 1680 .seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1681 @*/ 1682 PetscErrorCode TSForwardSetIntegralGradients(TS ts, PetscInt numfwdint, Vec *vp) 1683 { 1684 PetscFunctionBegin; 1685 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1686 PetscCheck(!ts->numcost || ts->numcost == numfwdint, PetscObjectComm((PetscObject)ts), PETSC_ERR_USER, "The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1687 if (!ts->numcost) ts->numcost = numfwdint; 1688 1689 ts->vecs_integral_sensip = vp; 1690 PetscFunctionReturn(PETSC_SUCCESS); 1691 } 1692 1693 /*@ 1694 TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term. 1695 1696 Input Parameter: 1697 . ts - the `TS` context obtained from `TSCreate()` 1698 1699 Output Parameters: 1700 + numfwdint - number of integrals 1701 - vp - the vectors containing the gradients for each integral w.r.t. parameters 1702 1703 Level: deprecated 1704 1705 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardStep()` 1706 @*/ 1707 PetscErrorCode TSForwardGetIntegralGradients(TS ts, PetscInt *numfwdint, Vec **vp) 1708 { 1709 PetscFunctionBegin; 1710 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1711 PetscAssertPointer(vp, 3); 1712 if (numfwdint) *numfwdint = ts->numcost; 1713 if (vp) *vp = ts->vecs_integral_sensip; 1714 PetscFunctionReturn(PETSC_SUCCESS); 1715 } 1716 1717 /*@ 1718 TSForwardStep - Compute the forward sensitivity for one time step. 1719 1720 Collective 1721 1722 Input Parameter: 1723 . ts - time stepping context 1724 1725 Level: advanced 1726 1727 Notes: 1728 This function cannot be called until `TSStep()` has been completed. 
1729 1730 .seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1731 @*/ 1732 PetscErrorCode TSForwardStep(TS ts) 1733 { 1734 PetscFunctionBegin; 1735 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1736 PetscCall(PetscLogEventBegin(TS_ForwardStep, ts, 0, 0, 0)); 1737 PetscUseTypeMethod(ts, forwardstep); 1738 PetscCall(PetscLogEventEnd(TS_ForwardStep, ts, 0, 0, 0)); 1739 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed, PetscObjectComm((PetscObject)ts), PETSC_ERR_NOT_CONVERGED, "TSFowardStep has failed due to %s", TSConvergedReasons[ts->reason]); 1740 PetscFunctionReturn(PETSC_SUCCESS); 1741 } 1742 1743 /*@ 1744 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1745 1746 Logically Collective 1747 1748 Input Parameters: 1749 + ts - the `TS` context obtained from `TSCreate()` 1750 . nump - number of parameters 1751 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1752 1753 Level: beginner 1754 1755 Notes: 1756 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1757 This function turns on a flag to trigger `TSSolve()` to compute forward sensitivities automatically. 1758 You must call this function before `TSSolve()`. 1759 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 
.seealso: [](ch_ts), `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardSetSensitivities(TS ts, PetscInt nump, Mat Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(Smat, MAT_CLASSID, 3);
  ts->forward_solve = PETSC_TRUE;
  /* PETSC_DEFAULT means: take the number of parameters from the number of columns of Smat */
  if (nump == PETSC_DEFAULT) {
    PetscCall(MatGetSize(Smat, NULL, &ts->num_parameters));
  } else ts->num_parameters = nump;
  /* reference Smat before destroying any previously stored matrix (safe even if Smat == ts->mat_sensip) */
  PetscCall(PetscObjectReference((PetscObject)Smat));
  PetscCall(MatDestroy(&ts->mat_sensip));
  ts->mat_sensip = Smat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardGetSensitivities - Returns the trajectory sensitivities

  Not Collective, but Smat returned is parallel if ts is parallel

  Input Parameter:
. ts - the `TS` context obtained from `TSCreate()`

  Output Parameters:
+ nump - number of parameters
- Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

  Level: intermediate

.seealso: [](ch_ts), `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetSensitivities(TS ts, PetscInt *nump, Mat *Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* both outputs are optional; pass NULL for any that is not wanted */
  if (nump) *nump = ts->num_parameters;
  if (Smat) *Smat = ts->mat_sensip;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardCostIntegral - Evaluate the cost integral in the forward run.

  Collective

  Input Parameter:
. ts - time stepping context

  Level: advanced

  Note:
  This function cannot be called until `TSStep()` has been completed.
.seealso: [](ch_ts), `TS`, `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  /* errors if the TS implementation provides no forwardintegral method */
  PetscUseTypeMethod(ts, forwardintegral);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

  Collective

  Input Parameters:
+ ts   - the `TS` context obtained from `TSCreate()`
- didp - parametric sensitivities of the initial condition

  Level: intermediate

  Notes:
  `TSSolve()` allows users to pass the initial solution directly to `TS`. But the tangent linear variables cannot be initialized in this way.
  This function is used to set initial values for tangent linear variables.

.seealso: [](ch_ts), `TS`, `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts, Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID, 1);
  PetscValidHeaderSpecific(didp, MAT_CLASSID, 2);
  /* only takes effect when no sensitivity matrix is set yet; otherwise didp is silently ignored */
  if (!ts->mat_sensip) PetscCall(TSForwardSetSensitivities(ts, PETSC_DEFAULT, didp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

  Input Parameter:
.
ts - the `TS` context obtained from `TSCreate()` 1855 1856 Output Parameters: 1857 + ns - number of stages 1858 - S - tangent linear sensitivities at the intermediate stages 1859 1860 Level: advanced 1861 1862 .seealso: `TS` 1863 @*/ 1864 PetscErrorCode TSForwardGetStages(TS ts, PetscInt *ns, Mat **S) 1865 { 1866 PetscFunctionBegin; 1867 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1868 1869 if (!ts->ops->getstages) *S = NULL; 1870 else PetscUseTypeMethod(ts, forwardgetstages, ns, S); 1871 PetscFunctionReturn(PETSC_SUCCESS); 1872 } 1873 1874 /*@ 1875 TSCreateQuadratureTS - Create a sub-`TS` that evaluates integrals over time 1876 1877 Input Parameters: 1878 + ts - the `TS` context obtained from `TSCreate()` 1879 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1880 1881 Output Parameter: 1882 . quadts - the child `TS` context 1883 1884 Level: intermediate 1885 1886 .seealso: [](ch_ts), `TSGetQuadratureTS()` 1887 @*/ 1888 PetscErrorCode TSCreateQuadratureTS(TS ts, PetscBool fwd, TS *quadts) 1889 { 1890 char prefix[128]; 1891 1892 PetscFunctionBegin; 1893 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1894 PetscAssertPointer(quadts, 3); 1895 PetscCall(TSDestroy(&ts->quadraturets)); 1896 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts), &ts->quadraturets)); 1897 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets, (PetscObject)ts, 1)); 1898 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%squad_", ((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : "")); 1899 PetscCall(TSSetOptionsPrefix(ts->quadraturets, prefix)); 1900 *quadts = ts->quadraturets; 1901 1902 if (ts->numcost) { 1903 PetscCall(VecCreateSeq(PETSC_COMM_SELF, ts->numcost, &(*quadts)->vec_sol)); 1904 } else { 1905 PetscCall(VecCreateSeq(PETSC_COMM_SELF, 1, &(*quadts)->vec_sol)); 1906 } 1907 ts->costintegralfwd = fwd; 1908 PetscFunctionReturn(PETSC_SUCCESS); 1909 } 1910 1911 /*@ 1912 TSGetQuadratureTS - Return the sub-`TS` that evaluates integrals over time 1913 1914 Input Parameter: 1915 . ts - the `TS` context obtained from `TSCreate()` 1916 1917 Output Parameters: 1918 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1919 - quadts - the child `TS` context 1920 1921 Level: intermediate 1922 1923 .seealso: [](ch_ts), `TSCreateQuadratureTS()` 1924 @*/ 1925 PetscErrorCode TSGetQuadratureTS(TS ts, PetscBool *fwd, TS *quadts) 1926 { 1927 PetscFunctionBegin; 1928 PetscValidHeaderSpecific(ts, TS_CLASSID, 1); 1929 if (fwd) *fwd = ts->costintegralfwd; 1930 if (quadts) *quadts = ts->quadraturets; 1931 PetscFunctionReturn(PETSC_SUCCESS); 1932 } 1933 1934 /*@ 1935 TSComputeSNESJacobian - Compute the Jacobian needed for the `SNESSolve()` in `TS` 1936 1937 Collective 1938 1939 Input Parameters: 1940 + ts - the `TS` context obtained from `TSCreate()` 1941 - x - state vector 1942 1943 Output Parameters: 1944 + J - Jacobian matrix 1945 - Jpre - preconditioning matrix for J (may be same as J) 1946 1947 Level: developer 1948 1949 Note: 1950 Uses finite differencing when `TS` Jacobian is not available. 
.seealso: `SNES`, `TS`, `SNESSetJacobian()`, `TSSetRHSJacobian()`, `TSSetIJacobian()`
@*/
PetscErrorCode TSComputeSNESJacobian(TS ts, Vec x, Mat J, Mat Jpre)
{
  SNES snes = ts->snes;
  PetscErrorCode (*jac)(SNES, Vec, Mat, Mat, void *) = NULL;

  PetscFunctionBegin;
  /*
    Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object
    because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for
    explicit methods. Instead, we check the Jacobian compute function directly to determine if FD
    coloring is used.
  */
  PetscCall(SNESGetJacobian(snes, NULL, NULL, &jac, NULL));
  if (jac == SNESComputeJacobianDefaultColor) {
    Vec f;
    /* make the base state known to SNES before evaluating the residual at it */
    PetscCall(SNESSetSolution(snes, x));
    PetscCall(SNESGetFunction(snes, &f, NULL, NULL));
    /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */
    PetscCall(SNESComputeFunction(snes, x, f));
  }
  /* dispatch to the regular SNES Jacobian evaluation (analytic or FD-colored) */
  PetscCall(SNESComputeJacobian(snes, x, J, Jpre));
  PetscFunctionReturn(PETSC_SUCCESS);
}