1 #include <petsc/private/tsimpl.h> /*I "petscts.h" I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep,TS_ForwardStep,TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective on TS 14 15 Input Parameters: 16 + ts - TS context obtained from TSCreate() 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Calling sequence of func: 22 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 23 + t - current timestep 24 . U - input vector (current ODE solution) 25 . A - output matrix 26 - ctx - [optional] user-defined function context 27 28 Level: intermediate 29 30 Notes: 31 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 32 33 .seealso: `TSGetRHSJacobianP()` 34 @*/ 35 PetscErrorCode TSSetRHSJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 36 { 37 PetscFunctionBegin; 38 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 39 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 40 41 ts->rhsjacobianp = func; 42 ts->rhsjacobianpctx = ctx; 43 if (Amat) { 44 PetscCall(PetscObjectReference((PetscObject)Amat)); 45 PetscCall(MatDestroy(&ts->Jacprhs)); 46 ts->Jacprhs = Amat; 47 } 48 PetscFunctionReturn(0); 49 } 50 51 /*@C 52 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 53 54 Logically Collective on TS 55 56 Input Parameter: 57 . ts - TS context obtained from TSCreate() 58 59 Output Parameters: 60 + Amat - JacobianP matrix 61 . 
func - function 62 - ctx - [optional] user-defined function context 63 64 Calling sequence of func: 65 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 66 + t - current timestep 67 . U - input vector (current ODE solution) 68 . A - output matrix 69 - ctx - [optional] user-defined function context 70 71 Level: intermediate 72 73 Notes: 74 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 75 76 .seealso: `TSSetRHSJacobianP()` 77 @*/ 78 PetscErrorCode TSGetRHSJacobianP(TS ts,Mat *Amat,PetscErrorCode (**func)(TS,PetscReal,Vec,Mat,void*),void **ctx) 79 { 80 PetscFunctionBegin; 81 if (func) *func = ts->rhsjacobianp; 82 if (ctx) *ctx = ts->rhsjacobianpctx; 83 if (Amat) *Amat = ts->Jacprhs; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 89 90 Collective on TS 91 92 Input Parameters: 93 . ts - The TS context obtained from TSCreate() 94 95 Level: developer 96 97 .seealso: `TSSetRHSJacobianP()` 98 @*/ 99 PetscErrorCode TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat) 100 { 101 PetscFunctionBegin; 102 if (!Amat) PetscFunctionReturn(0); 103 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 104 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 105 106 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 107 PetscCall((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx)); 108 PetscStackPop; 109 PetscFunctionReturn(0); 110 } 111 112 /*@C 113 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 114 115 Logically Collective on TS 116 117 Input Parameters: 118 + ts - TS context obtained from TSCreate() 119 . Amat - JacobianP matrix 120 . 
   func - function that computes the JacobianP
-  ctx - [optional] user-defined function context

   Calling sequence of func:
$  func (TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat A,void *ctx);
+  t - current timestep
.  U - input vector (current ODE solution)
.  Udot - time derivative of state vector
.  shift - shift to apply when the implicit system is linearized (same role as in TSComputeIJacobian())
.  A - output matrix
-  ctx - [optional] user-defined function context

   Level: intermediate

   Notes:
   Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p

.seealso: `TSComputeIJacobianP()`
@*/
PetscErrorCode TSSetIJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Vec,PetscReal,Mat,void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID,1);
  PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);

  /* Record callback and context; reference the user matrix before destroying the
     previously stored one so re-setting the same matrix is safe. */
  ts->ijacobianp = func;
  ts->ijacobianpctx = ctx;
  if (Amat) {
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(0);
}

/*@C
   TSComputeIJacobianP - Runs the user-defined IJacobianP function.

   Collective on TS

   Input Parameters:
+  ts - the TS context
.  t - current timestep
.  U - state vector
.  Udot - time derivative of state vector
.  shift - shift to apply to the Udot part of the linearization
-  imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate

   Output Parameters:
.  Amat - Jacobian matrix; the routine is a no-op when NULL

   Level: developer

.seealso: `TSSetIJacobianP()`
@*/
PetscErrorCode TSComputeIJacobianP(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat Amat,PetscBool imex)
{
  PetscFunctionBegin;
  if (!Amat) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidHeaderSpecific(Udot,VEC_CLASSID,4);

  PetscCall(PetscLogEventBegin(TS_JacobianPEval,ts,U,Amat,0));
  /* User-supplied implicit part F_P, if any */
  if (ts->ijacobianp) {
    PetscStackPush("TS user JacobianP function for sensitivity analysis");
    PetscCall((*ts->ijacobianp)(ts,t,U,Udot,shift,Amat,ts->ijacobianpctx));
    PetscStackPop;
  }
  if (imex) {
    if (!ts->ijacobianp) { /* system was written as Udot = G(t,U); implicit part is zero */
      PetscBool assembled;
      PetscCall(MatZeroEntries(Amat));
      PetscCall(MatAssembled(Amat,&assembled));
      if (!assembled) {
        PetscCall(MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY));
        PetscCall(MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY));
      }
    }
  } else {
    /* Fold the RHS contribution in: F = Udot - G, hence the -1 factors below */
    if (ts->rhsjacobianp) {
      PetscCall(TSComputeRHSJacobianP(ts,t,U,ts->Jacprhs));
    }
    if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */
      PetscCall(MatScale(Amat,-1));
    } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */
      MatStructure axpy = DIFFERENT_NONZERO_PATTERN;
      if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */
        PetscCall(MatZeroEntries(Amat));
      }
      PetscCall(MatAXPY(Amat,-1,ts->Jacprhs,axpy));
    }
  }
  PetscCall(PetscLogEventEnd(TS_JacobianPEval,ts,U,Amat,0));
  PetscFunctionReturn(0);
}

/*@C
   TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  numcost - number of gradients to be computed, this is the number of cost functions
.
   costintegral - vector that stores the integral values
.  rf - routine for evaluating the integrand function
.  drduf - function that computes the gradients of the r's with respect to u
.  drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL)
.  fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run
-  ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL)

   Calling sequence of rf:
$  PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx);

   Calling sequence of drduf:
$  PetscErrorCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx);

   Calling sequence of drdpf:
$  PetscErrorCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx);

   Level: deprecated

   Notes:
   For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions

.seealso: `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSSetCostIntegrand(TS ts,PetscInt numcost,Vec costintegral,PetscErrorCode (*rf)(TS,PetscReal,Vec,Vec,void*),
                                  PetscErrorCode (*drduf)(TS,PetscReal,Vec,Vec*,void*),
                                  PetscErrorCode (*drdpf)(TS,PetscReal,Vec,Vec*,void*),
                                  PetscBool fwd,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (costintegral) PetscValidHeaderSpecific(costintegral,VEC_CLASSID,3);
  /* numcost may only be fixed once; it must agree across the cost-related setters */
  PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()");
  if (!ts->numcost) ts->numcost=numcost;

  if (costintegral) {
    /* Reference before destroy so re-setting the stored vector is safe */
    PetscCall(PetscObjectReference((PetscObject)costintegral));
    PetscCall(VecDestroy(&ts->vec_costintegral));
    ts->vec_costintegral = costintegral;
  } else {
    if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF,numcost,&ts->vec_costintegral));
    } else {
      PetscCall(VecSet(ts->vec_costintegral,0.0));
    }
  }
  /* Work vector for the integrand values, lazily created to match vec_costintegral */
  if (!ts->vec_costintegrand) {
    PetscCall(VecDuplicate(ts->vec_costintegral,&ts->vec_costintegrand));
  } else {
    PetscCall(VecSet(ts->vec_costintegrand,0.0));
  }
  ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */
  ts->costintegrand = rf;
  ts->costintegrandctx = ctx;
  ts->drdufunction = drduf;
  ts->drdpfunction = drdpf;
  PetscFunctionReturn(0);
}

/*@C
   TSGetCostIntegral - Returns the values of the integral term in the cost functions.
   It is valid to call the routine after a backward run.

   Not Collective

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameter:
.  v - the vector containing the integrals for each cost function

   Level: intermediate

.seealso: `TSSetCostIntegrand()`

@*/
PetscErrorCode TSGetCostIntegral(TS ts,Vec *v)
{
  TS quadts;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(v,2);
  /* NOTE(review): quadts is dereferenced unconditionally — assumes a quadrature TS
     has been set up; confirm callers guarantee this */
  PetscCall(TSGetQuadratureTS(ts,NULL,&quadts));
  *v = quadts->vec_sol;
  PetscFunctionReturn(0);
}

/*@C
   TSComputeCostIntegrand - Evaluates the integral function in the cost functions.

   Input Parameters:
+  ts - the TS context
.  t - current time
-  U - state vector, i.e. current solution

   Output Parameter:
.  Q - vector of size numcost to hold the outputs

   Notes:
   Most users should not need to explicitly call this routine, as it
   is used internally within the sensitivity analysis context.
   Level: deprecated

.seealso: `TSSetCostIntegrand()`
@*/
PetscErrorCode TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidHeaderSpecific(Q,VEC_CLASSID,4);

  PetscCall(PetscLogEventBegin(TS_FunctionEval,ts,U,Q,0));
  if (ts->costintegrand) {
    PetscStackPush("TS user integrand in the cost function");
    PetscCall((*ts->costintegrand)(ts,t,U,Q,ts->costintegrandctx));
    PetscStackPop;
  } else {
    /* No integrand registered: the integral term contributes nothing */
    PetscCall(VecZeroEntries(Q));
  }

  PetscCall(PetscLogEventEnd(TS_FunctionEval,ts,U,Q,0));
  PetscFunctionReturn(0);
}

/*@C
   TSComputeDRDUFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian()

   Level: deprecated

@*/
PetscErrorCode TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec *DRDU)
{
  PetscFunctionBegin;
  if (!DRDU) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* NOTE(review): drdufunction is called without a NULL check — assumes
     TSSetCostIntegrand() was called with a non-NULL drduf; confirm */
  PetscStackPush("TS user DRDU function for sensitivity analysis");
  PetscCall((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@C
   TSComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP)
{
  PetscFunctionBegin;
  if (!DRDP) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* NOTE(review): drdpfunction is called without a NULL check — assumes
     TSSetCostIntegrand() was called with a non-NULL drdpf; confirm */
  PetscStackPush("TS user DRDP function for sensitivity analysis");
  PetscCall((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@C
   TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product.
   The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable.

   Logically Collective on TS

   Input Parameters:
+  ts - TS context obtained from TSCreate()
.  ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU
.  hessianproductfunc1 - vector-Hessian-vector product function for F_UU
.  ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP
.  hessianproductfunc2 - vector-Hessian-vector product function for F_UP
.  ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU
.  hessianproductfunc3 - vector-Hessian-vector product function for F_PU
.  ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP
-  hessianproductfunc4 - vector-Hessian-vector product function for F_PP

   Calling sequence of ihessianproductfunc:
$  ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx);
+  t - current timestep
.  U - input vector (current ODE solution)
.  Vl - an array of input vectors to be left-multiplied with the Hessian
.  Vr - input vector to be right-multiplied with the Hessian
.  VHV - an array of output vectors for vector-Hessian-vector product
-  ctx - [optional] user-defined function context

   Level: intermediate

   Notes:
   The first Hessian function and the working array are required.
   As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product
$  Vl_n^T*F_UP*Vr
   where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M.
   Each entry of F_UP corresponds to the derivative
$  F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}.
   The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being
$  VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]}
   If the cost function is a scalar, there will be only one vector in Vl and VHV.

.seealso: `TSComputeIHessianProductFunctionUU()`
@*/
PetscErrorCode TSSetIHessianProduct(TS ts,Vec *ihp1,PetscErrorCode (*ihessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
                                    Vec *ihp2,PetscErrorCode (*ihessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
                                    Vec *ihp3,PetscErrorCode (*ihessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
                                    Vec *ihp4,PetscErrorCode (*ihessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*),
                                    void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(ihp1,2);

  ts->ihessianproductctx = ctx;
  if (ihp1) ts->vecs_fuu = ihp1;
  if (ihp2) ts->vecs_fup = ihp2;
  if (ihp3) ts->vecs_fpu = ihp3;
  if (ihp4) ts->vecs_fpp = ihp4;
  ts->ihessianproduct_fuu = ihessianproductfunc1;
  ts->ihessianproduct_fup = ihessianproductfunc2;
  ts->ihessianproduct_fpu = ihessianproductfunc3;
  ts->ihessianproduct_fpp = ihessianproductfunc4;
  PetscFunctionReturn(0);
}

/*@C
   TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu.

   Collective on TS

   Input Parameters:
.  ts - The TS context obtained from TSCreate()

   Notes:
   TSComputeIHessianProductFunctionUU() is typically used for sensitivity implementation,
   so most users would not generally call this routine themselves.
462 463 Level: developer 464 465 .seealso: `TSSetIHessianProduct()` 466 @*/ 467 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 468 { 469 PetscFunctionBegin; 470 if (!VHV) PetscFunctionReturn(0); 471 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 472 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 473 474 if (ts->ihessianproduct_fuu) { 475 PetscStackPush("TS user IHessianProduct function 1 for sensitivity analysis"); 476 PetscCall((*ts->ihessianproduct_fuu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 477 PetscStackPop; 478 } 479 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 480 if (ts->rhshessianproduct_guu) { 481 PetscInt nadj; 482 PetscCall(TSComputeRHSHessianProductFunctionUU(ts,t,U,Vl,Vr,VHV)); 483 for (nadj=0; nadj<ts->numcost; nadj++) { 484 PetscCall(VecScale(VHV[nadj],-1)); 485 } 486 } 487 PetscFunctionReturn(0); 488 } 489 490 /*@C 491 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 492 493 Collective on TS 494 495 Input Parameters: 496 . ts - The TS context obtained from TSCreate() 497 498 Notes: 499 TSComputeIHessianProductFunctionUP() is typically used for sensitivity implementation, 500 so most users would not generally call this routine themselves. 
501 502 Level: developer 503 504 .seealso: `TSSetIHessianProduct()` 505 @*/ 506 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 507 { 508 PetscFunctionBegin; 509 if (!VHV) PetscFunctionReturn(0); 510 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 511 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 512 513 if (ts->ihessianproduct_fup) { 514 PetscStackPush("TS user IHessianProduct function 2 for sensitivity analysis"); 515 PetscCall((*ts->ihessianproduct_fup)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 516 PetscStackPop; 517 } 518 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 519 if (ts->rhshessianproduct_gup) { 520 PetscInt nadj; 521 PetscCall(TSComputeRHSHessianProductFunctionUP(ts,t,U,Vl,Vr,VHV)); 522 for (nadj=0; nadj<ts->numcost; nadj++) { 523 PetscCall(VecScale(VHV[nadj],-1)); 524 } 525 } 526 PetscFunctionReturn(0); 527 } 528 529 /*@C 530 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 531 532 Collective on TS 533 534 Input Parameters: 535 . ts - The TS context obtained from TSCreate() 536 537 Notes: 538 TSComputeIHessianProductFunctionPU() is typically used for sensitivity implementation, 539 so most users would not generally call this routine themselves. 
540 541 Level: developer 542 543 .seealso: `TSSetIHessianProduct()` 544 @*/ 545 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 546 { 547 PetscFunctionBegin; 548 if (!VHV) PetscFunctionReturn(0); 549 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 550 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 551 552 if (ts->ihessianproduct_fpu) { 553 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 554 PetscCall((*ts->ihessianproduct_fpu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 555 PetscStackPop; 556 } 557 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 558 if (ts->rhshessianproduct_gpu) { 559 PetscInt nadj; 560 PetscCall(TSComputeRHSHessianProductFunctionPU(ts,t,U,Vl,Vr,VHV)); 561 for (nadj=0; nadj<ts->numcost; nadj++) { 562 PetscCall(VecScale(VHV[nadj],-1)); 563 } 564 } 565 PetscFunctionReturn(0); 566 } 567 568 /*@C 569 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 570 571 Collective on TS 572 573 Input Parameters: 574 . ts - The TS context obtained from TSCreate() 575 576 Notes: 577 TSComputeIHessianProductFunctionPP() is typically used for sensitivity implementation, 578 so most users would not generally call this routine themselves. 
579 580 Level: developer 581 582 .seealso: `TSSetIHessianProduct()` 583 @*/ 584 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 585 { 586 PetscFunctionBegin; 587 if (!VHV) PetscFunctionReturn(0); 588 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 589 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 590 591 if (ts->ihessianproduct_fpp) { 592 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 593 PetscCall((*ts->ihessianproduct_fpp)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 594 PetscStackPop; 595 } 596 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 597 if (ts->rhshessianproduct_gpp) { 598 PetscInt nadj; 599 PetscCall(TSComputeRHSHessianProductFunctionPP(ts,t,U,Vl,Vr,VHV)); 600 for (nadj=0; nadj<ts->numcost; nadj++) { 601 PetscCall(VecScale(VHV[nadj],-1)); 602 } 603 } 604 PetscFunctionReturn(0); 605 } 606 607 /*@C 608 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 609 610 Logically Collective on TS 611 612 Input Parameters: 613 + ts - TS context obtained from TSCreate() 614 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 615 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 616 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 617 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 618 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 619 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 620 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 621 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP 622 623 Calling sequence of ihessianproductfunc: 624 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 625 + t - current timestep 626 . U - input vector (current ODE solution) 627 . Vl - an array of input vectors to be left-multiplied with the Hessian 628 . Vr - input vector to be right-multiplied with the Hessian 629 . VHV - an array of output vectors for vector-Hessian-vector product 630 - ctx - [optional] user-defined function context 631 632 Level: intermediate 633 634 Notes: 635 The first Hessian function and the working array are required. 636 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 637 $ Vl_n^T*G_UP*Vr 638 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 639 Each entry of G_UP corresponds to the derivative 640 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 641 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 642 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 643 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
644 645 .seealso: 646 @*/ 647 PetscErrorCode TSSetRHSHessianProduct(TS ts,Vec *rhshp1,PetscErrorCode (*rhshessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 648 Vec *rhshp2,PetscErrorCode (*rhshessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 649 Vec *rhshp3,PetscErrorCode (*rhshessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 650 Vec *rhshp4,PetscErrorCode (*rhshessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 651 void *ctx) 652 { 653 PetscFunctionBegin; 654 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 655 PetscValidPointer(rhshp1,2); 656 657 ts->rhshessianproductctx = ctx; 658 if (rhshp1) ts->vecs_guu = rhshp1; 659 if (rhshp2) ts->vecs_gup = rhshp2; 660 if (rhshp3) ts->vecs_gpu = rhshp3; 661 if (rhshp4) ts->vecs_gpp = rhshp4; 662 ts->rhshessianproduct_guu = rhshessianproductfunc1; 663 ts->rhshessianproduct_gup = rhshessianproductfunc2; 664 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 665 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 666 PetscFunctionReturn(0); 667 } 668 669 /*@C 670 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 671 672 Collective on TS 673 674 Input Parameters: 675 . ts - The TS context obtained from TSCreate() 676 677 Notes: 678 TSComputeRHSHessianProductFunctionUU() is typically used for sensitivity implementation, 679 so most users would not generally call this routine themselves. 
680 681 Level: developer 682 683 .seealso: `TSSetRHSHessianProduct()` 684 @*/ 685 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 686 { 687 PetscFunctionBegin; 688 if (!VHV) PetscFunctionReturn(0); 689 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 690 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 691 692 PetscStackPush("TS user RHSHessianProduct function 1 for sensitivity analysis"); 693 PetscCall((*ts->rhshessianproduct_guu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 694 PetscStackPop; 695 PetscFunctionReturn(0); 696 } 697 698 /*@C 699 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 700 701 Collective on TS 702 703 Input Parameters: 704 . ts - The TS context obtained from TSCreate() 705 706 Notes: 707 TSComputeRHSHessianProductFunctionUP() is typically used for sensitivity implementation, 708 so most users would not generally call this routine themselves. 709 710 Level: developer 711 712 .seealso: `TSSetRHSHessianProduct()` 713 @*/ 714 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 715 { 716 PetscFunctionBegin; 717 if (!VHV) PetscFunctionReturn(0); 718 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 719 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 720 721 PetscStackPush("TS user RHSHessianProduct function 2 for sensitivity analysis"); 722 PetscCall((*ts->rhshessianproduct_gup)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 723 PetscStackPop; 724 PetscFunctionReturn(0); 725 } 726 727 /*@C 728 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 729 730 Collective on TS 731 732 Input Parameters: 733 . ts - The TS context obtained from TSCreate() 734 735 Notes: 736 TSComputeRHSHessianProductFunctionPU() is typically used for sensitivity implementation, 737 so most users would not generally call this routine themselves. 
738 739 Level: developer 740 741 .seealso: `TSSetRHSHessianProduct()` 742 @*/ 743 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 744 { 745 PetscFunctionBegin; 746 if (!VHV) PetscFunctionReturn(0); 747 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 748 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 749 750 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 751 PetscCall((*ts->rhshessianproduct_gpu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 752 PetscStackPop; 753 PetscFunctionReturn(0); 754 } 755 756 /*@C 757 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 758 759 Collective on TS 760 761 Input Parameters: 762 . ts - The TS context obtained from TSCreate() 763 764 Notes: 765 TSComputeRHSHessianProductFunctionPP() is typically used for sensitivity implementation, 766 so most users would not generally call this routine themselves. 767 768 Level: developer 769 770 .seealso: `TSSetRHSHessianProduct()` 771 @*/ 772 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 773 { 774 PetscFunctionBegin; 775 if (!VHV) PetscFunctionReturn(0); 776 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 777 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 778 779 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 780 PetscCall((*ts->rhshessianproduct_gpp)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 781 PetscStackPop; 782 PetscFunctionReturn(0); 783 } 784 785 /* --------------------------- Adjoint sensitivity ---------------------------*/ 786 787 /*@ 788 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 789 for use by the TSAdjoint routines. 790 791 Logically Collective on TS 792 793 Input Parameters: 794 + ts - the TS context obtained from TSCreate() 795 . 
numcost - number of gradients to be computed, this is the number of cost functions 796 . lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 797 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 798 799 Level: beginner 800 801 Notes: 802 the entries in these vectors must be correctly initialized with the values lamda_i = df/dy|finaltime mu_i = df/dp|finaltime 803 804 After TSAdjointSolve() is called the lamba and the mu contain the computed sensitivities 805 806 .seealso `TSGetCostGradients()` 807 @*/ 808 PetscErrorCode TSSetCostGradients(TS ts,PetscInt numcost,Vec *lambda,Vec *mu) 809 { 810 PetscFunctionBegin; 811 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 812 PetscValidPointer(lambda,3); 813 ts->vecs_sensi = lambda; 814 ts->vecs_sensip = mu; 815 PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 816 ts->numcost = numcost; 817 PetscFunctionReturn(0); 818 } 819 820 /*@ 821 TSGetCostGradients - Returns the gradients from the TSAdjointSolve() 822 823 Not Collective, but Vec returned is parallel if TS is parallel 824 825 Input Parameter: 826 . ts - the TS context obtained from TSCreate() 827 828 Output Parameters: 829 + numcost - size of returned arrays 830 . 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 831 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 832 833 Level: intermediate 834 835 .seealso: `TSSetCostGradients()` 836 @*/ 837 PetscErrorCode TSGetCostGradients(TS ts,PetscInt *numcost,Vec **lambda,Vec **mu) 838 { 839 PetscFunctionBegin; 840 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 841 if (numcost) *numcost = ts->numcost; 842 if (lambda) *lambda = ts->vecs_sensi; 843 if (mu) *mu = ts->vecs_sensip; 844 PetscFunctionReturn(0); 845 } 846 847 /*@ 848 TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters 849 for use by the TSAdjoint routines. 850 851 Logically Collective on TS 852 853 Input Parameters: 854 + ts - the TS context obtained from TSCreate() 855 . numcost - number of cost functions 856 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 857 . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 858 - dir - the direction vector that are multiplied with the Hessian of the cost functions 859 860 Level: beginner 861 862 Notes: Hessian of the cost function is completely different from Hessian of the ODE/DAE system 863 864 For second-order adjoint, one needs to call this function and then TSAdjointSetForward() before TSSolve(). 865 866 After TSAdjointSolve() is called, the lamba2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver. 
867 868 Passing NULL for lambda2 disables the second-order calculation. 869 .seealso: `TSAdjointSetForward()` 870 @*/ 871 PetscErrorCode TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec *lambda2,Vec *mu2,Vec dir) 872 { 873 PetscFunctionBegin; 874 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 875 PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 876 ts->numcost = numcost; 877 ts->vecs_sensi2 = lambda2; 878 ts->vecs_sensi2p = mu2; 879 ts->vec_dir = dir; 880 PetscFunctionReturn(0); 881 } 882 883 /*@ 884 TSGetCostHessianProducts - Returns the gradients from the TSAdjointSolve() 885 886 Not Collective, but Vec returned is parallel if TS is parallel 887 888 Input Parameter: 889 . ts - the TS context obtained from TSCreate() 890 891 Output Parameters: 892 + numcost - number of cost functions 893 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 894 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 895 - dir - the direction vector that are multiplied with the Hessian of the cost functions 896 897 Level: intermediate 898 899 .seealso: `TSSetCostHessianProducts()` 900 @*/ 901 PetscErrorCode TSGetCostHessianProducts(TS ts,PetscInt *numcost,Vec **lambda2,Vec **mu2, Vec *dir) 902 { 903 PetscFunctionBegin; 904 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 905 if (numcost) *numcost = ts->numcost; 906 if (lambda2) *lambda2 = ts->vecs_sensi2; 907 if (mu2) *mu2 = ts->vecs_sensi2p; 908 if (dir) *dir = ts->vec_dir; 909 PetscFunctionReturn(0); 910 } 911 912 /*@ 913 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 914 915 Logically Collective on TS 916 917 Input Parameters: 918 + ts - the TS context obtained from TSCreate() 919 - didp - the derivative of initial values w.r.t. parameters 920 921 Level: intermediate 922 923 Notes: When computing sensitivies w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. TSAdjoint does not reset the tangent linear solver automatically, TSAdjointResetForward() should be called to reset the tangent linear solver. 924 925 .seealso: `TSSetCostHessianProducts()`, `TSAdjointResetForward()` 926 @*/ 927 PetscErrorCode TSAdjointSetForward(TS ts,Mat didp) 928 { 929 Mat A; 930 Vec sp; 931 PetscScalar *xarr; 932 PetscInt lsize; 933 934 PetscFunctionBegin; 935 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 936 PetscCheck(ts->vecs_sensi2,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Must call TSSetCostHessianProducts() first"); 937 PetscCheck(ts->vec_dir,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 938 /* create a single-column dense matrix */ 939 PetscCall(VecGetLocalSize(ts->vec_sol,&lsize)); 940 PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts),lsize,PETSC_DECIDE,PETSC_DECIDE,1,NULL,&A)); 941 942 PetscCall(VecDuplicate(ts->vec_sol,&sp)); 943 PetscCall(MatDenseGetColumn(A,0,&xarr)); 944 PetscCall(VecPlaceArray(sp,xarr)); 945 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 946 if (didp) { 947 PetscCall(MatMult(didp,ts->vec_dir,sp)); 948 PetscCall(VecScale(sp,2.)); 949 } else { 950 PetscCall(VecZeroEntries(sp)); 951 } 952 } else { /* tangent linear variable initialized as dir */ 953 PetscCall(VecCopy(ts->vec_dir,sp)); 954 } 955 PetscCall(VecResetArray(sp)); 956 PetscCall(MatDenseRestoreColumn(A,&xarr)); 957 PetscCall(VecDestroy(&sp)); 958 959 PetscCall(TSForwardSetInitialSensitivities(ts,A)); /* if didp is NULL, identity matrix is assumed */ 960 961 PetscCall(MatDestroy(&A)); 962 PetscFunctionReturn(0); 963 } 964 965 /*@ 966 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 967 968 Logically Collective on TS 969 970 Input Parameters: 971 . ts - the TS context obtained from TSCreate() 972 973 Level: intermediate 974 975 .seealso: `TSAdjointSetForward()` 976 @*/ 977 PetscErrorCode TSAdjointResetForward(TS ts) 978 { 979 PetscFunctionBegin; 980 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 981 PetscCall(TSForwardReset(ts)); 982 PetscFunctionReturn(0); 983 } 984 985 /*@ 986 TSAdjointSetUp - Sets up the internal data structures for the later use 987 of an adjoint solver 988 989 Collective on TS 990 991 Input Parameter: 992 . 
ts - the TS context obtained from TSCreate() 993 994 Level: advanced 995 996 .seealso: `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()` 997 @*/ 998 PetscErrorCode TSAdjointSetUp(TS ts) 999 { 1000 TSTrajectory tj; 1001 PetscBool match; 1002 1003 PetscFunctionBegin; 1004 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1005 if (ts->adjointsetupcalled) PetscFunctionReturn(0); 1006 PetscCheck(ts->vecs_sensi,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetCostGradients() first"); 1007 PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 1008 PetscCall(TSGetTrajectory(ts,&tj)); 1009 PetscCall(PetscObjectTypeCompare((PetscObject)tj,TSTRAJECTORYBASIC,&match)); 1010 if (match) { 1011 PetscBool solution_only; 1012 PetscCall(TSTrajectoryGetSolutionOnly(tj,&solution_only)); 1013 PetscCheck(!solution_only,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 1014 } 1015 PetscCall(TSTrajectorySetUseHistory(tj,PETSC_FALSE)); /* not use TSHistory */ 1016 1017 if (ts->quadraturets) { /* if there is integral in the cost function */ 1018 PetscCall(VecDuplicate(ts->vecs_sensi[0],&ts->vec_drdu_col)); 1019 if (ts->vecs_sensip) { 1020 PetscCall(VecDuplicate(ts->vecs_sensip[0],&ts->vec_drdp_col)); 1021 } 1022 } 1023 1024 if (ts->ops->adjointsetup) { 1025 PetscCall((*ts->ops->adjointsetup)(ts)); 1026 } 1027 ts->adjointsetupcalled = PETSC_TRUE; 1028 PetscFunctionReturn(0); 1029 } 1030 1031 /*@ 1032 TSAdjointReset - Resets a TSAdjoint context and removes any allocated Vecs and Mats. 1033 1034 Collective on TS 1035 1036 Input Parameter: 1037 . 
ts - the TS context obtained from TSCreate() 1038 1039 Level: beginner 1040 1041 .seealso: `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()` 1042 @*/ 1043 PetscErrorCode TSAdjointReset(TS ts) 1044 { 1045 PetscFunctionBegin; 1046 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1047 if (ts->ops->adjointreset) { 1048 PetscCall((*ts->ops->adjointreset)(ts)); 1049 } 1050 if (ts->quadraturets) { /* if there is integral in the cost function */ 1051 PetscCall(VecDestroy(&ts->vec_drdu_col)); 1052 if (ts->vecs_sensip) { 1053 PetscCall(VecDestroy(&ts->vec_drdp_col)); 1054 } 1055 } 1056 ts->vecs_sensi = NULL; 1057 ts->vecs_sensip = NULL; 1058 ts->vecs_sensi2 = NULL; 1059 ts->vecs_sensi2p = NULL; 1060 ts->vec_dir = NULL; 1061 ts->adjointsetupcalled = PETSC_FALSE; 1062 PetscFunctionReturn(0); 1063 } 1064 1065 /*@ 1066 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1067 1068 Logically Collective on TS 1069 1070 Input Parameters: 1071 + ts - the TS context obtained from TSCreate() 1072 - steps - number of steps to use 1073 1074 Level: intermediate 1075 1076 Notes: 1077 Normally one does not call this and TSAdjointSolve() integrates back to the original timestep. 
One can call this 1078 so as to integrate back to less than the original timestep 1079 1080 .seealso: `TSSetExactFinalTime()` 1081 @*/ 1082 PetscErrorCode TSAdjointSetSteps(TS ts,PetscInt steps) 1083 { 1084 PetscFunctionBegin; 1085 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1086 PetscValidLogicalCollectiveInt(ts,steps,2); 1087 PetscCheck(steps >= 0,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back a negative number of steps"); 1088 PetscCheck(steps <= ts->steps,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back more than the total number of forward steps"); 1089 ts->adjoint_max_steps = steps; 1090 PetscFunctionReturn(0); 1091 } 1092 1093 /*@C 1094 TSAdjointSetRHSJacobian - Deprecated, use TSSetRHSJacobianP() 1095 1096 Level: deprecated 1097 1098 @*/ 1099 PetscErrorCode TSAdjointSetRHSJacobian(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 1100 { 1101 PetscFunctionBegin; 1102 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1103 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 1104 1105 ts->rhsjacobianp = func; 1106 ts->rhsjacobianpctx = ctx; 1107 if (Amat) { 1108 PetscCall(PetscObjectReference((PetscObject)Amat)); 1109 PetscCall(MatDestroy(&ts->Jacp)); 1110 ts->Jacp = Amat; 1111 } 1112 PetscFunctionReturn(0); 1113 } 1114 1115 /*@C 1116 TSAdjointComputeRHSJacobian - Deprecated, use TSComputeRHSJacobianP() 1117 1118 Level: deprecated 1119 1120 @*/ 1121 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat) 1122 { 1123 PetscFunctionBegin; 1124 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1125 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1126 PetscValidPointer(Amat,4); 1127 1128 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 1129 PetscCall((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx)); 1130 PetscStackPop; 1131 PetscFunctionReturn(0); 1132 } 1133 1134 /*@ 1135 TSAdjointComputeDRDYFunction - Deprecated, use TSGetQuadratureTS() then 
TSComputeRHSJacobian() 1136 1137 Level: deprecated 1138 1139 @*/ 1140 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec *DRDU) 1141 { 1142 PetscFunctionBegin; 1143 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1144 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1145 1146 PetscStackPush("TS user DRDY function for sensitivity analysis"); 1147 PetscCall((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx)); 1148 PetscStackPop; 1149 PetscFunctionReturn(0); 1150 } 1151 1152 /*@ 1153 TSAdjointComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP() 1154 1155 Level: deprecated 1156 1157 @*/ 1158 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP) 1159 { 1160 PetscFunctionBegin; 1161 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1162 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1163 1164 PetscStackPush("TS user DRDP function for sensitivity analysis"); 1165 PetscCall((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx)); 1166 PetscStackPop; 1167 PetscFunctionReturn(0); 1168 } 1169 1170 /*@C 1171 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1172 1173 Level: intermediate 1174 1175 .seealso: `TSAdjointMonitorSet()` 1176 @*/ 1177 PetscErrorCode TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf) 1178 { 1179 PetscViewer viewer = vf->viewer; 1180 1181 PetscFunctionBegin; 1182 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8); 1183 PetscCall(PetscViewerPushFormat(viewer,vf->format)); 1184 PetscCall(VecView(lambda[0],viewer)); 1185 PetscCall(PetscViewerPopFormat(viewer)); 1186 PetscFunctionReturn(0); 1187 } 1188 1189 /*@C 1190 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user 1191 1192 Collective on TS 1193 1194 Input Parameters: 1195 + ts - TS object you wish to monitor 1196 . name - the monitor type one is seeking 1197 . 
help - message indicating what monitoring is done
.  manual - manual page for the monitor
.  monitor - the monitor function
-  monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the TS or PetscViewer objects

   Level: developer

.seealso: `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
          `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
          `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`,
          `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
          `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
          `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
          `PetscOptionsFList()`, `PetscOptionsEList()`
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts,const char name[],const char help[], const char manual[],PetscErrorCode (*monitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,PetscViewerAndFormat*),PetscErrorCode (*monitorsetup)(TS,PetscViewerAndFormat*))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts),((PetscObject) ts)->options,((PetscObject)ts)->prefix,name,&viewer,&format,&flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    /* the viewer-and-format pair takes ownership of the viewer reference */
    PetscCall(PetscViewerAndFormatCreate(viewer,format,&vf));
    PetscCall(PetscObjectDereference((PetscObject)viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts,vf));
    PetscCall(TSAdjointMonitorSet(ts,(PetscErrorCode (*)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
   timestep to display the iteration's progress.

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  adjointmonitor - monitoring routine
.  adjointmctx - [optional] user-defined context for private data for the
                 monitor routine (use NULL if no context is desired)
-  adjointmonitordestroy - [optional] routine that frees monitor context
                           (may be NULL)

   Calling sequence of monitor:
$    int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx)

+    ts - the TS context
.    steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
             been interpolated to)
.    time - current time
.    u - current iterate
.    numcost - number of cost functions
.    lambda - sensitivities to initial conditions
.    mu - sensitivities to parameters
-    adjointmctx - [optional] adjoint monitoring context

   Notes:
   This routine adds an additional monitor to the list of monitors that
   already has been loaded.
1262 1263 Fortran Notes: 1264 Only a single monitor function can be set for each TS object 1265 1266 Level: intermediate 1267 1268 .seealso: `TSAdjointMonitorCancel()` 1269 @*/ 1270 PetscErrorCode TSAdjointMonitorSet(TS ts,PetscErrorCode (*adjointmonitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*),void *adjointmctx,PetscErrorCode (*adjointmdestroy)(void**)) 1271 { 1272 PetscInt i; 1273 PetscBool identical; 1274 1275 PetscFunctionBegin; 1276 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1277 for (i=0; i<ts->numbermonitors;i++) { 1278 PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor,adjointmctx,adjointmdestroy,(PetscErrorCode (*)(void))ts->adjointmonitor[i],ts->adjointmonitorcontext[i],ts->adjointmonitordestroy[i],&identical)); 1279 if (identical) PetscFunctionReturn(0); 1280 } 1281 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many adjoint monitors set"); 1282 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1283 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1284 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void*)adjointmctx; 1285 PetscFunctionReturn(0); 1286 } 1287 1288 /*@C 1289 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1290 1291 Logically Collective on TS 1292 1293 Input Parameters: 1294 . ts - the TS context obtained from TSCreate() 1295 1296 Notes: 1297 There is no way to remove a single, specific monitor. 
1298 1299 Level: intermediate 1300 1301 .seealso: `TSAdjointMonitorSet()` 1302 @*/ 1303 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1304 { 1305 PetscInt i; 1306 1307 PetscFunctionBegin; 1308 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1309 for (i=0; i<ts->numberadjointmonitors; i++) { 1310 if (ts->adjointmonitordestroy[i]) { 1311 PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i])); 1312 } 1313 } 1314 ts->numberadjointmonitors = 0; 1315 PetscFunctionReturn(0); 1316 } 1317 1318 /*@C 1319 TSAdjointMonitorDefault - the default monitor of adjoint computations 1320 1321 Level: intermediate 1322 1323 .seealso: `TSAdjointMonitorSet()` 1324 @*/ 1325 PetscErrorCode TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf) 1326 { 1327 PetscViewer viewer = vf->viewer; 1328 1329 PetscFunctionBegin; 1330 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8); 1331 PetscCall(PetscViewerPushFormat(viewer,vf->format)); 1332 PetscCall(PetscViewerASCIIAddTab(viewer,((PetscObject)ts)->tablevel)); 1333 PetscCall(PetscViewerASCIIPrintf(viewer,"%" PetscInt_FMT " TS dt %g time %g%s",step,(double)ts->time_step,(double)ptime,ts->steprollback ? " (r)\n" : "\n")); 1334 PetscCall(PetscViewerASCIISubtractTab(viewer,((PetscObject)ts)->tablevel)); 1335 PetscCall(PetscViewerPopFormat(viewer)); 1336 PetscFunctionReturn(0); 1337 } 1338 1339 /*@C 1340 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint TS solvers by calling 1341 VecView() for the sensitivities to initial states at each timestep 1342 1343 Collective on TS 1344 1345 Input Parameters: 1346 + ts - the TS context 1347 . step - current time-step 1348 . ptime - current time 1349 . u - current state 1350 . numcost - number of cost functions 1351 . lambda - sensitivities to initial conditions 1352 . 
mu - sensitivities to parameters 1353 - dummy - either a viewer or NULL 1354 1355 Level: intermediate 1356 1357 .seealso: `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()` 1358 @*/ 1359 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda,Vec *mu,void *dummy) 1360 { 1361 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1362 PetscDraw draw; 1363 PetscReal xl,yl,xr,yr,h; 1364 char time[32]; 1365 1366 PetscFunctionBegin; 1367 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0); 1368 1369 PetscCall(VecView(lambda[0],ictx->viewer)); 1370 PetscCall(PetscViewerDrawGetDraw(ictx->viewer,0,&draw)); 1371 PetscCall(PetscSNPrintf(time,32,"Timestep %d Time %g",(int)step,(double)ptime)); 1372 PetscCall(PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr)); 1373 h = yl + .95*(yr - yl); 1374 PetscCall(PetscDrawStringCentered(draw,.5*(xl+xr),h,PETSC_DRAW_BLACK,time)); 1375 PetscCall(PetscDrawFlush(draw)); 1376 PetscFunctionReturn(0); 1377 } 1378 1379 /* 1380 TSAdjointSetFromOptions - Sets various TSAdjoint parameters from user options. 1381 1382 Collective on TSAdjoint 1383 1384 Input Parameter: 1385 . ts - the TS context 1386 1387 Options Database Keys: 1388 + -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory) 1389 . 
-ts_adjoint_monitor - print information at each adjoint time step 1390 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1391 1392 Level: developer 1393 1394 Notes: 1395 This is not normally called directly by users 1396 1397 .seealso: `TSSetSaveTrajectory()`, `TSTrajectorySetUp()` 1398 */ 1399 PetscErrorCode TSAdjointSetFromOptions(PetscOptionItems *PetscOptionsObject,TS ts) 1400 { 1401 PetscBool tflg,opt; 1402 1403 PetscFunctionBegin; 1404 PetscValidHeaderSpecific(ts,TS_CLASSID,2); 1405 PetscOptionsHeadBegin(PetscOptionsObject,"TS Adjoint options"); 1406 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1407 PetscCall(PetscOptionsBool("-ts_adjoint_solve","Solve the adjoint problem immediately after solving the forward problem","",tflg,&tflg,&opt)); 1408 if (opt) { 1409 PetscCall(TSSetSaveTrajectory(ts)); 1410 ts->adjoint_solve = tflg; 1411 } 1412 PetscCall(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor","Monitor adjoint timestep size","TSAdjointMonitorDefault",TSAdjointMonitorDefault,NULL)); 1413 PetscCall(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor_sensi","Monitor sensitivity in the adjoint computation","TSAdjointMonitorSensi",TSAdjointMonitorSensi,NULL)); 1414 opt = PETSC_FALSE; 1415 PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",&opt)); 1416 if (opt) { 1417 TSMonitorDrawCtx ctx; 1418 PetscInt howoften = 1; 1419 1420 PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",howoften,&howoften,NULL)); 1421 PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&ctx)); 1422 PetscCall(TSAdjointMonitorSet(ts,TSAdjointMonitorDrawSensi,ctx,(PetscErrorCode (*)(void**))TSMonitorDrawCtxDestroy)); 1423 } 1424 
PetscFunctionReturn(0); 1425 } 1426 1427 /*@ 1428 TSAdjointStep - Steps one time step backward in the adjoint run 1429 1430 Collective on TS 1431 1432 Input Parameter: 1433 . ts - the TS context obtained from TSCreate() 1434 1435 Level: intermediate 1436 1437 .seealso: `TSAdjointSetUp()`, `TSAdjointSolve()` 1438 @*/ 1439 PetscErrorCode TSAdjointStep(TS ts) 1440 { 1441 DM dm; 1442 1443 PetscFunctionBegin; 1444 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1445 PetscCall(TSGetDM(ts,&dm)); 1446 PetscCall(TSAdjointSetUp(ts)); 1447 ts->steps--; /* must decrease the step index before the adjoint step is taken. */ 1448 1449 ts->reason = TS_CONVERGED_ITERATING; 1450 ts->ptime_prev = ts->ptime; 1451 PetscCheck(ts->ops->adjointstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSStep has failed because the adjoint of %s has not been implemented, try other time stepping methods for adjoint sensitivity analysis",((PetscObject)ts)->type_name); 1452 PetscCall(PetscLogEventBegin(TS_AdjointStep,ts,0,0,0)); 1453 PetscCall((*ts->ops->adjointstep)(ts)); 1454 PetscCall(PetscLogEventEnd(TS_AdjointStep,ts,0,0,0)); 1455 ts->adjoint_steps++; 1456 1457 if (ts->reason < 0) { 1458 PetscCheck(!ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSAdjointStep has failed due to %s",TSConvergedReasons[ts->reason]); 1459 } else if (!ts->reason) { 1460 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1461 } 1462 PetscFunctionReturn(0); 1463 } 1464 1465 /*@ 1466 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1467 1468 Collective on TS 1469 1470 Input Parameter: 1471 . ts - the TS context obtained from TSCreate() 1472 1473 Options Database: 1474 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1475 1476 Level: intermediate 1477 1478 Notes: 1479 This must be called after a call to TSSolve() that solves the forward problem 1480 1481 By default this will integrate back to the initial time, one can use TSAdjointSetSteps() to step back to a later time 1482 1483 .seealso: `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()` 1484 @*/ 1485 PetscErrorCode TSAdjointSolve(TS ts) 1486 { 1487 static PetscBool cite = PETSC_FALSE; 1488 #if defined(TSADJOINT_STAGE) 1489 PetscLogStage adjoint_stage; 1490 #endif 1491 1492 PetscFunctionBegin; 1493 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1494 PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n" 1495 " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n" 1496 " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n" 1497 " journal = {SIAM Journal on Scientific Computing},\n" 1498 " volume = {44},\n" 1499 " number = {1},\n" 1500 " pages = {C1-C24},\n" 1501 " doi = {10.1137/21M140078X},\n" 1502 " year = {2022}\n}\n",&cite)); 1503 #if defined(TSADJOINT_STAGE) 1504 PetscCall(PetscLogStageRegister("TSAdjoint",&adjoint_stage)); 1505 PetscCall(PetscLogStagePush(adjoint_stage)); 1506 #endif 1507 PetscCall(TSAdjointSetUp(ts)); 1508 1509 /* reset time step and iteration counters */ 1510 ts->adjoint_steps = 0; 1511 ts->ksp_its = 0; 1512 ts->snes_its = 0; 1513 ts->num_snes_failures = 0; 1514 ts->reject = 0; 1515 ts->reason = TS_CONVERGED_ITERATING; 1516 1517 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1518 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1519 1520 while (!ts->reason) { 1521 PetscCall(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime)); 1522 PetscCall(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip)); 
1523 PetscCall(TSAdjointEventHandler(ts)); 1524 PetscCall(TSAdjointStep(ts)); 1525 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) { 1526 PetscCall(TSAdjointCostIntegral(ts)); 1527 } 1528 } 1529 if (!ts->steps) { 1530 PetscCall(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime)); 1531 PetscCall(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip)); 1532 } 1533 ts->solvetime = ts->ptime; 1534 PetscCall(TSTrajectoryViewFromOptions(ts->trajectory,NULL,"-ts_trajectory_view")); 1535 PetscCall(VecViewFromOptions(ts->vecs_sensi[0],(PetscObject) ts, "-ts_adjoint_view_solution")); 1536 ts->adjoint_max_steps = 0; 1537 #if defined(TSADJOINT_STAGE) 1538 PetscCall(PetscLogStagePop()); 1539 #endif 1540 PetscFunctionReturn(0); 1541 } 1542 1543 /*@C 1544 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using TSAdjointMonitorSet() 1545 1546 Collective on TS 1547 1548 Input Parameters: 1549 + ts - time stepping context obtained from TSCreate() 1550 . step - step number that has just completed 1551 . ptime - model time of the state 1552 . u - state at the current model time 1553 . numcost - number of cost functions (dimension of lambda or mu) 1554 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1555 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1556 1557 Notes: 1558 TSAdjointMonitor() is typically used automatically within the time stepping implementations. 1559 Users would almost never call this routine directly. 
1560 1561 Level: developer 1562 1563 @*/ 1564 PetscErrorCode TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda, Vec *mu) 1565 { 1566 PetscInt i,n = ts->numberadjointmonitors; 1567 1568 PetscFunctionBegin; 1569 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1570 PetscValidHeaderSpecific(u,VEC_CLASSID,4); 1571 PetscCall(VecLockReadPush(u)); 1572 for (i=0; i<n; i++) { 1573 PetscCall((*ts->adjointmonitor[i])(ts,step,ptime,u,numcost,lambda,mu,ts->adjointmonitorcontext[i])); 1574 } 1575 PetscCall(VecLockReadPop(u)); 1576 PetscFunctionReturn(0); 1577 } 1578 1579 /*@ 1580 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1581 1582 Collective on TS 1583 1584 Input Parameter: 1585 . ts - time stepping context 1586 1587 Level: advanced 1588 1589 Notes: 1590 This function cannot be called until TSAdjointStep() has been completed. 1591 1592 .seealso: `TSAdjointSolve()`, `TSAdjointStep` 1593 @*/ 1594 PetscErrorCode TSAdjointCostIntegral(TS ts) 1595 { 1596 PetscFunctionBegin; 1597 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1598 PetscCheck(ts->ops->adjointintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the adjoint run",((PetscObject)ts)->type_name); 1599 PetscCall((*ts->ops->adjointintegral)(ts)); 1600 PetscFunctionReturn(0); 1601 } 1602 1603 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1604 1605 /*@ 1606 TSForwardSetUp - Sets up the internal data structures for the later use 1607 of forward sensitivity analysis 1608 1609 Collective on TS 1610 1611 Input Parameter: 1612 . 
ts - the TS context obtained from TSCreate() 1613 1614 Level: advanced 1615 1616 .seealso: `TSCreate()`, `TSDestroy()`, `TSSetUp()` 1617 @*/ 1618 PetscErrorCode TSForwardSetUp(TS ts) 1619 { 1620 PetscFunctionBegin; 1621 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1622 if (ts->forwardsetupcalled) PetscFunctionReturn(0); 1623 if (ts->ops->forwardsetup) { 1624 PetscCall((*ts->ops->forwardsetup)(ts)); 1625 } 1626 PetscCall(VecDuplicate(ts->vec_sol,&ts->vec_sensip_col)); 1627 ts->forwardsetupcalled = PETSC_TRUE; 1628 PetscFunctionReturn(0); 1629 } 1630 1631 /*@ 1632 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1633 1634 Collective on TS 1635 1636 Input Parameter: 1637 . ts - the TS context obtained from TSCreate() 1638 1639 Level: advanced 1640 1641 .seealso: `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()` 1642 @*/ 1643 PetscErrorCode TSForwardReset(TS ts) 1644 { 1645 TS quadts = ts->quadraturets; 1646 1647 PetscFunctionBegin; 1648 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1649 if (ts->ops->forwardreset) { 1650 PetscCall((*ts->ops->forwardreset)(ts)); 1651 } 1652 PetscCall(MatDestroy(&ts->mat_sensip)); 1653 if (quadts) { 1654 PetscCall(MatDestroy(&quadts->mat_sensip)); 1655 } 1656 PetscCall(VecDestroy(&ts->vec_sensip_col)); 1657 ts->forward_solve = PETSC_FALSE; 1658 ts->forwardsetupcalled = PETSC_FALSE; 1659 PetscFunctionReturn(0); 1660 } 1661 1662 /*@ 1663 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1664 1665 Input Parameters: 1666 + ts - the TS context obtained from TSCreate() 1667 . numfwdint - number of integrals 1668 - vp - the vectors containing the gradients for each integral w.r.t. 
parameters 1669 1670 Level: deprecated 1671 1672 .seealso: `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1673 @*/ 1674 PetscErrorCode TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec *vp) 1675 { 1676 PetscFunctionBegin; 1677 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1678 PetscCheck(!ts->numcost || ts->numcost == numfwdint,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1679 if (!ts->numcost) ts->numcost = numfwdint; 1680 1681 ts->vecs_integral_sensip = vp; 1682 PetscFunctionReturn(0); 1683 } 1684 1685 /*@ 1686 TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term. 1687 1688 Input Parameter: 1689 . ts - the TS context obtained from TSCreate() 1690 1691 Output Parameter: 1692 . vp - the vectors containing the gradients for each integral w.r.t. parameters 1693 1694 Level: deprecated 1695 1696 .seealso: `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()` 1697 @*/ 1698 PetscErrorCode TSForwardGetIntegralGradients(TS ts,PetscInt *numfwdint,Vec **vp) 1699 { 1700 PetscFunctionBegin; 1701 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1702 PetscValidPointer(vp,3); 1703 if (numfwdint) *numfwdint = ts->numcost; 1704 if (vp) *vp = ts->vecs_integral_sensip; 1705 PetscFunctionReturn(0); 1706 } 1707 1708 /*@ 1709 TSForwardStep - Compute the forward sensitivity for one time step. 1710 1711 Collective on TS 1712 1713 Input Parameter: 1714 . ts - time stepping context 1715 1716 Level: advanced 1717 1718 Notes: 1719 This function cannot be called until TSStep() has been completed. 
1720 1721 .seealso: `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()` 1722 @*/ 1723 PetscErrorCode TSForwardStep(TS ts) 1724 { 1725 PetscFunctionBegin; 1726 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1727 PetscCheck(ts->ops->forwardstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide forward sensitivity analysis",((PetscObject)ts)->type_name); 1728 PetscCall(PetscLogEventBegin(TS_ForwardStep,ts,0,0,0)); 1729 PetscCall((*ts->ops->forwardstep)(ts)); 1730 PetscCall(PetscLogEventEnd(TS_ForwardStep,ts,0,0,0)); 1731 PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSFowardStep has failed due to %s",TSConvergedReasons[ts->reason]); 1732 PetscFunctionReturn(0); 1733 } 1734 1735 /*@ 1736 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1737 1738 Logically Collective on TS 1739 1740 Input Parameters: 1741 + ts - the TS context obtained from TSCreate() 1742 . nump - number of parameters 1743 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1744 1745 Level: beginner 1746 1747 Notes: 1748 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1749 This function turns on a flag to trigger TSSolve() to compute forward sensitivities automatically. 1750 You must call this function before TSSolve(). 1751 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 
.seealso: `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(Smat,MAT_CLASSID,3);
  /* Flag TSSolve() to propagate the tangent linear (forward sensitivity) equations */
  ts->forward_solve = PETSC_TRUE;
  if (nump == PETSC_DEFAULT) {
    /* Infer the parameter count from the number of columns of Smat */
    PetscCall(MatGetSize(Smat,NULL,&ts->num_parameters));
  } else ts->num_parameters = nump;
  /* Reference before destroying the old matrix so passing the currently held Smat is safe */
  PetscCall(PetscObjectReference((PetscObject)Smat));
  PetscCall(MatDestroy(&ts->mat_sensip));
  ts->mat_sensip = Smat;
  PetscFunctionReturn(0);
}

/*@
   TSForwardGetSensitivities - Returns the trajectory sensitivities

   Not Collective, but Vec returned is parallel if TS is parallel

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameters:
+  nump - number of parameters
-  Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

   Level: intermediate

.seealso: `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetSensitivities(TS ts,PetscInt *nump,Mat *Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (nump) *nump = ts->num_parameters;
  if (Smat) *Smat = ts->mat_sensip;
  PetscFunctionReturn(0);
}

/*@
   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

   Collective on TS

   Input Parameter:
.  ts - time stepping context

   Level: advanced

   Notes:
   This function cannot be called until TSStep() has been completed.
.seealso: `TSSolve()`, `TSAdjointCostIntegral()`
@*/
PetscErrorCode TSForwardCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* The integral evaluation is implementation-specific; fail clearly if the type lacks it */
  PetscCheck(ts->ops->forwardintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the forward run",((PetscObject)ts)->type_name);
  PetscCall((*ts->ops->forwardintegral)(ts));
  PetscFunctionReturn(0);
}

/*@
   TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities

   Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
-  didp - parametric sensitivities of the initial condition

   Level: intermediate

   Notes: TSSolve() allows users to pass the initial solution directly to TS. But the tangent linear variables cannot be initialized in this way. This function is used to set initial values for tangent linear variables.

.seealso: `TSForwardSetSensitivities()`
@*/
PetscErrorCode TSForwardSetInitialSensitivities(TS ts,Mat didp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(didp,MAT_CLASSID,2);
  /* NOTE(review): if sensitivities were already set via TSForwardSetSensitivities(),
     didp is silently ignored here -- confirm this precedence is intended */
  if (!ts->mat_sensip) {
    PetscCall(TSForwardSetSensitivities(ts,PETSC_DEFAULT,didp));
  }
  PetscFunctionReturn(0);
}

/*@
   TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages

   Input Parameter:
.
ts - the TS context obtained from TSCreate() 1848 1849 Output Parameters: 1850 + ns - number of stages 1851 - S - tangent linear sensitivities at the intermediate stages 1852 1853 Level: advanced 1854 1855 @*/ 1856 PetscErrorCode TSForwardGetStages(TS ts,PetscInt *ns,Mat **S) 1857 { 1858 PetscFunctionBegin; 1859 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1860 1861 if (!ts->ops->getstages) *S=NULL; 1862 else { 1863 PetscCall((*ts->ops->forwardgetstages)(ts,ns,S)); 1864 } 1865 PetscFunctionReturn(0); 1866 } 1867 1868 /*@ 1869 TSCreateQuadratureTS - Create a sub-TS that evaluates integrals over time 1870 1871 Input Parameters: 1872 + ts - the TS context obtained from TSCreate() 1873 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1874 1875 Output Parameters: 1876 . quadts - the child TS context 1877 1878 Level: intermediate 1879 1880 .seealso: `TSGetQuadratureTS()` 1881 @*/ 1882 PetscErrorCode TSCreateQuadratureTS(TS ts,PetscBool fwd,TS *quadts) 1883 { 1884 char prefix[128]; 1885 1886 PetscFunctionBegin; 1887 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1888 PetscValidPointer(quadts,3); 1889 PetscCall(TSDestroy(&ts->quadraturets)); 1890 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts),&ts->quadraturets)); 1891 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets,(PetscObject)ts,1)); 1892 PetscCall(PetscLogObjectParent((PetscObject)ts,(PetscObject)ts->quadraturets)); 1893 PetscCall(PetscSNPrintf(prefix,sizeof(prefix),"%squad_",((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : "")); 1894 PetscCall(TSSetOptionsPrefix(ts->quadraturets,prefix)); 1895 *quadts = ts->quadraturets; 1896 1897 if (ts->numcost) { 1898 PetscCall(VecCreateSeq(PETSC_COMM_SELF,ts->numcost,&(*quadts)->vec_sol)); 1899 } else { 1900 PetscCall(VecCreateSeq(PETSC_COMM_SELF,1,&(*quadts)->vec_sol)); 1901 } 1902 ts->costintegralfwd = fwd; 1903 PetscFunctionReturn(0); 1904 } 1905 1906 /*@ 1907 TSGetQuadratureTS - Return the sub-TS that evaluates integrals over time 1908 1909 Input Parameter: 1910 . ts - the TS context obtained from TSCreate() 1911 1912 Output Parameters: 1913 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1914 - quadts - the child TS context 1915 1916 Level: intermediate 1917 1918 .seealso: `TSCreateQuadratureTS()` 1919 @*/ 1920 PetscErrorCode TSGetQuadratureTS(TS ts,PetscBool *fwd,TS *quadts) 1921 { 1922 PetscFunctionBegin; 1923 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1924 if (fwd) *fwd = ts->costintegralfwd; 1925 if (quadts) *quadts = ts->quadraturets; 1926 PetscFunctionReturn(0); 1927 } 1928 1929 /*@ 1930 TSComputeSNESJacobian - Compute the SNESJacobian 1931 1932 Input Parameters: 1933 + ts - the TS context obtained from TSCreate() 1934 - x - state vector 1935 1936 Output Parameters: 1937 + J - Jacobian matrix 1938 - Jpre - preconditioning matrix for J (may be same as J) 1939 1940 Level: developer 1941 1942 Notes: 1943 Using SNES to compute the Jacobian enables finite differencing when TS Jacobian is not available. 1944 @*/ 1945 PetscErrorCode TSComputeSNESJacobian(TS ts,Vec x,Mat J,Mat Jpre) 1946 { 1947 SNES snes = ts->snes; 1948 PetscErrorCode (*jac)(SNES,Vec,Mat,Mat,void*) = NULL; 1949 1950 PetscFunctionBegin; 1951 /* 1952 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 1953 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 1954 explicit methods. 
Instead, we check the Jacobian compute function directly to determin if FD 1955 coloring is used. 1956 */ 1957 PetscCall(SNESGetJacobian(snes,NULL,NULL,&jac,NULL)); 1958 if (jac == SNESComputeJacobianDefaultColor) { 1959 Vec f; 1960 PetscCall(SNESSetSolution(snes,x)); 1961 PetscCall(SNESGetFunction(snes,&f,NULL,NULL)); 1962 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 1963 PetscCall(SNESComputeFunction(snes,x,f)); 1964 } 1965 PetscCall(SNESComputeJacobian(snes,x,J,Jpre)); 1966 PetscFunctionReturn(0); 1967 } 1968