1 #include <petsc/private/tsimpl.h> /*I "petscts.h" I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep,TS_ForwardStep,TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective on TS 14 15 Input Parameters: 16 + ts - TS context obtained from TSCreate() 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Calling sequence of func: 22 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 23 + t - current timestep 24 . U - input vector (current ODE solution) 25 . A - output matrix 26 - ctx - [optional] user-defined function context 27 28 Level: intermediate 29 30 Notes: 31 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 32 33 .seealso: `TSGetRHSJacobianP()` 34 @*/ 35 PetscErrorCode TSSetRHSJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 36 { 37 PetscFunctionBegin; 38 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 39 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 40 41 ts->rhsjacobianp = func; 42 ts->rhsjacobianpctx = ctx; 43 if (Amat) { 44 PetscCall(PetscObjectReference((PetscObject)Amat)); 45 PetscCall(MatDestroy(&ts->Jacprhs)); 46 ts->Jacprhs = Amat; 47 } 48 PetscFunctionReturn(0); 49 } 50 51 /*@C 52 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 53 54 Logically Collective on TS 55 56 Input Parameter: 57 . ts - TS context obtained from TSCreate() 58 59 Output Parameters: 60 + Amat - JacobianP matrix 61 . 
func - function 62 - ctx - [optional] user-defined function context 63 64 Calling sequence of func: 65 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 66 + t - current timestep 67 . U - input vector (current ODE solution) 68 . A - output matrix 69 - ctx - [optional] user-defined function context 70 71 Level: intermediate 72 73 Notes: 74 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 75 76 .seealso: `TSSetRHSJacobianP()` 77 @*/ 78 PetscErrorCode TSGetRHSJacobianP(TS ts,Mat *Amat,PetscErrorCode (**func)(TS,PetscReal,Vec,Mat,void*),void **ctx) 79 { 80 PetscFunctionBegin; 81 if (func) *func = ts->rhsjacobianp; 82 if (ctx) *ctx = ts->rhsjacobianpctx; 83 if (Amat) *Amat = ts->Jacprhs; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 89 90 Collective on TS 91 92 Input Parameters: 93 . ts - The TS context obtained from TSCreate() 94 95 Level: developer 96 97 .seealso: `TSSetRHSJacobianP()` 98 @*/ 99 PetscErrorCode TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat) 100 { 101 PetscFunctionBegin; 102 if (!Amat) PetscFunctionReturn(0); 103 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 104 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 105 106 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 107 PetscCall((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx)); 108 PetscStackPop; 109 PetscFunctionReturn(0); 110 } 111 112 /*@C 113 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 114 115 Logically Collective on TS 116 117 Input Parameters: 118 + ts - TS context obtained from TSCreate() 119 . Amat - JacobianP matrix 120 . 
func - function 121 - ctx - [optional] user-defined function context 122 123 Calling sequence of func: 124 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 125 + t - current timestep 126 . U - input vector (current ODE solution) 127 . Udot - time derivative of state vector 128 . shift - shift to apply, see note below 129 . A - output matrix 130 - ctx - [optional] user-defined function context 131 132 Level: intermediate 133 134 Notes: 135 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 136 137 .seealso: 138 @*/ 139 PetscErrorCode TSSetIJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Vec,PetscReal,Mat,void*),void *ctx) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 143 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 144 145 ts->ijacobianp = func; 146 ts->ijacobianpctx = ctx; 147 if (Amat) { 148 PetscCall(PetscObjectReference((PetscObject)Amat)); 149 PetscCall(MatDestroy(&ts->Jacp)); 150 ts->Jacp = Amat; 151 } 152 PetscFunctionReturn(0); 153 } 154 155 /*@C 156 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 157 158 Collective on TS 159 160 Input Parameters: 161 + ts - the TS context 162 . t - current timestep 163 . U - state vector 164 . Udot - time derivative of state vector 165 . shift - shift to apply, see note below 166 - imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate 167 168 Output Parameters: 169 . 
A - Jacobian matrix 170 171 Level: developer 172 173 .seealso: `TSSetIJacobianP()` 174 @*/ 175 PetscErrorCode TSComputeIJacobianP(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat Amat,PetscBool imex) 176 { 177 PetscFunctionBegin; 178 if (!Amat) PetscFunctionReturn(0); 179 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 180 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 181 PetscValidHeaderSpecific(Udot,VEC_CLASSID,4); 182 183 PetscCall(PetscLogEventBegin(TS_JacobianPEval,ts,U,Amat,0)); 184 if (ts->ijacobianp) { 185 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 186 PetscCall((*ts->ijacobianp)(ts,t,U,Udot,shift,Amat,ts->ijacobianpctx)); 187 PetscStackPop; 188 } 189 if (imex) { 190 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 191 PetscBool assembled; 192 PetscCall(MatZeroEntries(Amat)); 193 PetscCall(MatAssembled(Amat,&assembled)); 194 if (!assembled) { 195 PetscCall(MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY)); 196 PetscCall(MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY)); 197 } 198 } 199 } else { 200 if (ts->rhsjacobianp) PetscCall(TSComputeRHSJacobianP(ts,t,U,ts->Jacprhs)); 201 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 202 PetscCall(MatScale(Amat,-1)); 203 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 204 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 205 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 206 PetscCall(MatZeroEntries(Amat)); 207 } 208 PetscCall(MatAXPY(Amat,-1,ts->Jacprhs,axpy)); 209 } 210 } 211 PetscCall(PetscLogEventEnd(TS_JacobianPEval,ts,U,Amat,0)); 212 PetscFunctionReturn(0); 213 } 214 215 /*@C 216 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 217 218 Logically Collective on TS 219 220 Input Parameters: 221 + ts - the TS context obtained from TSCreate() 222 . numcost - number of gradients to be computed, this is the number of cost functions 223 . 
costintegral - vector that stores the integral values 224 . rf - routine for evaluating the integrand function 225 . drduf - function that computes the gradients of the r's with respect to u 226 . drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL) 227 . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 228 - ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL) 229 230 Calling sequence of rf: 231 $ PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx); 232 233 Calling sequence of drduf: 234 $ PetscErroCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx); 235 236 Calling sequence of drdpf: 237 $ PetscErroCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx); 238 239 Level: deprecated 240 241 Notes: 242 For optimization there is usually a single cost function (numcost = 1). For sensitivities there may be multiple cost functions 243 244 .seealso: `TSSetRHSJacobianP()`, `TSGetCostGradients()`, `TSSetCostGradients()` 245 @*/ 246 PetscErrorCode TSSetCostIntegrand(TS ts,PetscInt numcost,Vec costintegral,PetscErrorCode (*rf)(TS,PetscReal,Vec,Vec,void*), 247 PetscErrorCode (*drduf)(TS,PetscReal,Vec,Vec*,void*), 248 PetscErrorCode (*drdpf)(TS,PetscReal,Vec,Vec*,void*), 249 PetscBool fwd,void *ctx) 250 { 251 PetscFunctionBegin; 252 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 253 if (costintegral) PetscValidHeaderSpecific(costintegral,VEC_CLASSID,3); 254 PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()"); 255 if (!ts->numcost) ts->numcost=numcost; 256 257 if (costintegral) { 258 PetscCall(PetscObjectReference((PetscObject)costintegral)); 259 
PetscCall(VecDestroy(&ts->vec_costintegral)); 260 ts->vec_costintegral = costintegral; 261 } else { 262 if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */ 263 PetscCall(VecCreateSeq(PETSC_COMM_SELF,numcost,&ts->vec_costintegral)); 264 } else { 265 PetscCall(VecSet(ts->vec_costintegral,0.0)); 266 } 267 } 268 if (!ts->vec_costintegrand) { 269 PetscCall(VecDuplicate(ts->vec_costintegral,&ts->vec_costintegrand)); 270 } else { 271 PetscCall(VecSet(ts->vec_costintegrand,0.0)); 272 } 273 ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */ 274 ts->costintegrand = rf; 275 ts->costintegrandctx = ctx; 276 ts->drdufunction = drduf; 277 ts->drdpfunction = drdpf; 278 PetscFunctionReturn(0); 279 } 280 281 /*@C 282 TSGetCostIntegral - Returns the values of the integral term in the cost functions. 283 It is valid to call the routine after a backward run. 284 285 Not Collective 286 287 Input Parameter: 288 . ts - the TS context obtained from TSCreate() 289 290 Output Parameter: 291 . v - the vector containing the integrals for each cost function 292 293 Level: intermediate 294 295 .seealso: `TSSetCostIntegrand()` 296 297 @*/ 298 PetscErrorCode TSGetCostIntegral(TS ts,Vec *v) 299 { 300 TS quadts; 301 302 PetscFunctionBegin; 303 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 304 PetscValidPointer(v,2); 305 PetscCall(TSGetQuadratureTS(ts,NULL,&quadts)); 306 *v = quadts->vec_sol; 307 PetscFunctionReturn(0); 308 } 309 310 /*@C 311 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 312 313 Input Parameters: 314 + ts - the TS context 315 . t - current time 316 - U - state vector, i.e. current solution 317 318 Output Parameter: 319 . Q - vector of size numcost to hold the outputs 320 321 Notes: 322 Most users should not need to explicitly call this routine, as it 323 is used internally within the sensitivity analysis context. 
324 325 Level: deprecated 326 327 .seealso: `TSSetCostIntegrand()` 328 @*/ 329 PetscErrorCode TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q) 330 { 331 PetscFunctionBegin; 332 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 333 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 334 PetscValidHeaderSpecific(Q,VEC_CLASSID,4); 335 336 PetscCall(PetscLogEventBegin(TS_FunctionEval,ts,U,Q,0)); 337 if (ts->costintegrand) { 338 PetscStackPush("TS user integrand in the cost function"); 339 PetscCall((*ts->costintegrand)(ts,t,U,Q,ts->costintegrandctx)); 340 PetscStackPop; 341 } else { 342 PetscCall(VecZeroEntries(Q)); 343 } 344 345 PetscCall(PetscLogEventEnd(TS_FunctionEval,ts,U,Q,0)); 346 PetscFunctionReturn(0); 347 } 348 349 /*@C 350 TSComputeDRDUFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian() 351 352 Level: deprecated 353 354 @*/ 355 PetscErrorCode TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec *DRDU) 356 { 357 PetscFunctionBegin; 358 if (!DRDU) PetscFunctionReturn(0); 359 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 360 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 361 362 PetscStackPush("TS user DRDU function for sensitivity analysis"); 363 PetscCall((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx)); 364 PetscStackPop; 365 PetscFunctionReturn(0); 366 } 367 368 /*@C 369 TSComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP() 370 371 Level: deprecated 372 373 @*/ 374 PetscErrorCode TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP) 375 { 376 PetscFunctionBegin; 377 if (!DRDP) PetscFunctionReturn(0); 378 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 379 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 380 381 PetscStackPush("TS user DRDP function for sensitivity analysis"); 382 PetscCall((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx)); 383 PetscStackPop; 384 PetscFunctionReturn(0); 385 } 386 387 /*@C 388 TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. 
The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 389 390 Logically Collective on TS 391 392 Input Parameters: 393 + ts - TS context obtained from TSCreate() 394 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 395 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 396 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 397 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 398 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 399 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 400 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 401 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP 402 403 Calling sequence of ihessianproductfunc: 404 $ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 405 + t - current timestep 406 . U - input vector (current ODE solution) 407 . Vl - an array of input vectors to be left-multiplied with the Hessian 408 . Vr - input vector to be right-multiplied with the Hessian 409 . VHV - an array of output vectors for vector-Hessian-vector product 410 - ctx - [optional] user-defined function context 411 412 Level: intermediate 413 414 Notes: 415 The first Hessian function and the working array are required. 416 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 417 $ Vl_n^T*F_UP*Vr 418 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 419 Each entry of F_UP corresponds to the derivative 420 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 
421 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being 422 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]} 423 If the cost function is a scalar, there will be only one vector in Vl and VHV. 424 425 .seealso: 426 @*/ 427 PetscErrorCode TSSetIHessianProduct(TS ts,Vec *ihp1,PetscErrorCode (*ihessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 428 Vec *ihp2,PetscErrorCode (*ihessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 429 Vec *ihp3,PetscErrorCode (*ihessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 430 Vec *ihp4,PetscErrorCode (*ihessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 431 void *ctx) 432 { 433 PetscFunctionBegin; 434 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 435 PetscValidPointer(ihp1,2); 436 437 ts->ihessianproductctx = ctx; 438 if (ihp1) ts->vecs_fuu = ihp1; 439 if (ihp2) ts->vecs_fup = ihp2; 440 if (ihp3) ts->vecs_fpu = ihp3; 441 if (ihp4) ts->vecs_fpp = ihp4; 442 ts->ihessianproduct_fuu = ihessianproductfunc1; 443 ts->ihessianproduct_fup = ihessianproductfunc2; 444 ts->ihessianproduct_fpu = ihessianproductfunc3; 445 ts->ihessianproduct_fpp = ihessianproductfunc4; 446 PetscFunctionReturn(0); 447 } 448 449 /*@C 450 TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. 451 452 Collective on TS 453 454 Input Parameters: 455 . ts - The TS context obtained from TSCreate() 456 457 Notes: 458 TSComputeIHessianProductFunctionUU() is typically used for sensitivity implementation, 459 so most users would not generally call this routine themselves. 
460 461 Level: developer 462 463 .seealso: `TSSetIHessianProduct()` 464 @*/ 465 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 466 { 467 PetscFunctionBegin; 468 if (!VHV) PetscFunctionReturn(0); 469 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 470 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 471 472 if (ts->ihessianproduct_fuu) { 473 PetscStackPush("TS user IHessianProduct function 1 for sensitivity analysis"); 474 PetscCall((*ts->ihessianproduct_fuu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 475 PetscStackPop; 476 } 477 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 478 if (ts->rhshessianproduct_guu) { 479 PetscInt nadj; 480 PetscCall(TSComputeRHSHessianProductFunctionUU(ts,t,U,Vl,Vr,VHV)); 481 for (nadj=0; nadj<ts->numcost; nadj++) { 482 PetscCall(VecScale(VHV[nadj],-1)); 483 } 484 } 485 PetscFunctionReturn(0); 486 } 487 488 /*@C 489 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 490 491 Collective on TS 492 493 Input Parameters: 494 . ts - The TS context obtained from TSCreate() 495 496 Notes: 497 TSComputeIHessianProductFunctionUP() is typically used for sensitivity implementation, 498 so most users would not generally call this routine themselves. 
499 500 Level: developer 501 502 .seealso: `TSSetIHessianProduct()` 503 @*/ 504 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 505 { 506 PetscFunctionBegin; 507 if (!VHV) PetscFunctionReturn(0); 508 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 509 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 510 511 if (ts->ihessianproduct_fup) { 512 PetscStackPush("TS user IHessianProduct function 2 for sensitivity analysis"); 513 PetscCall((*ts->ihessianproduct_fup)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 514 PetscStackPop; 515 } 516 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 517 if (ts->rhshessianproduct_gup) { 518 PetscInt nadj; 519 PetscCall(TSComputeRHSHessianProductFunctionUP(ts,t,U,Vl,Vr,VHV)); 520 for (nadj=0; nadj<ts->numcost; nadj++) { 521 PetscCall(VecScale(VHV[nadj],-1)); 522 } 523 } 524 PetscFunctionReturn(0); 525 } 526 527 /*@C 528 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 529 530 Collective on TS 531 532 Input Parameters: 533 . ts - The TS context obtained from TSCreate() 534 535 Notes: 536 TSComputeIHessianProductFunctionPU() is typically used for sensitivity implementation, 537 so most users would not generally call this routine themselves. 
538 539 Level: developer 540 541 .seealso: `TSSetIHessianProduct()` 542 @*/ 543 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 544 { 545 PetscFunctionBegin; 546 if (!VHV) PetscFunctionReturn(0); 547 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 548 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 549 550 if (ts->ihessianproduct_fpu) { 551 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 552 PetscCall((*ts->ihessianproduct_fpu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 553 PetscStackPop; 554 } 555 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 556 if (ts->rhshessianproduct_gpu) { 557 PetscInt nadj; 558 PetscCall(TSComputeRHSHessianProductFunctionPU(ts,t,U,Vl,Vr,VHV)); 559 for (nadj=0; nadj<ts->numcost; nadj++) { 560 PetscCall(VecScale(VHV[nadj],-1)); 561 } 562 } 563 PetscFunctionReturn(0); 564 } 565 566 /*@C 567 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 568 569 Collective on TS 570 571 Input Parameters: 572 . ts - The TS context obtained from TSCreate() 573 574 Notes: 575 TSComputeIHessianProductFunctionPP() is typically used for sensitivity implementation, 576 so most users would not generally call this routine themselves. 
577 578 Level: developer 579 580 .seealso: `TSSetIHessianProduct()` 581 @*/ 582 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 583 { 584 PetscFunctionBegin; 585 if (!VHV) PetscFunctionReturn(0); 586 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 587 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 588 589 if (ts->ihessianproduct_fpp) { 590 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 591 PetscCall((*ts->ihessianproduct_fpp)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx)); 592 PetscStackPop; 593 } 594 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 595 if (ts->rhshessianproduct_gpp) { 596 PetscInt nadj; 597 PetscCall(TSComputeRHSHessianProductFunctionPP(ts,t,U,Vl,Vr,VHV)); 598 for (nadj=0; nadj<ts->numcost; nadj++) { 599 PetscCall(VecScale(VHV[nadj],-1)); 600 } 601 } 602 PetscFunctionReturn(0); 603 } 604 605 /*@C 606 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 607 608 Logically Collective on TS 609 610 Input Parameters: 611 + ts - TS context obtained from TSCreate() 612 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 613 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 614 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 615 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 616 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 617 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 618 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 619 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP 620 621 Calling sequence of ihessianproductfunc: 622 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 623 + t - current timestep 624 . U - input vector (current ODE solution) 625 . Vl - an array of input vectors to be left-multiplied with the Hessian 626 . Vr - input vector to be right-multiplied with the Hessian 627 . VHV - an array of output vectors for vector-Hessian-vector product 628 - ctx - [optional] user-defined function context 629 630 Level: intermediate 631 632 Notes: 633 The first Hessian function and the working array are required. 634 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 635 $ Vl_n^T*G_UP*Vr 636 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 637 Each entry of G_UP corresponds to the derivative 638 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 639 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 640 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 641 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
642 643 .seealso: 644 @*/ 645 PetscErrorCode TSSetRHSHessianProduct(TS ts,Vec *rhshp1,PetscErrorCode (*rhshessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 646 Vec *rhshp2,PetscErrorCode (*rhshessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 647 Vec *rhshp3,PetscErrorCode (*rhshessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 648 Vec *rhshp4,PetscErrorCode (*rhshessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 649 void *ctx) 650 { 651 PetscFunctionBegin; 652 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 653 PetscValidPointer(rhshp1,2); 654 655 ts->rhshessianproductctx = ctx; 656 if (rhshp1) ts->vecs_guu = rhshp1; 657 if (rhshp2) ts->vecs_gup = rhshp2; 658 if (rhshp3) ts->vecs_gpu = rhshp3; 659 if (rhshp4) ts->vecs_gpp = rhshp4; 660 ts->rhshessianproduct_guu = rhshessianproductfunc1; 661 ts->rhshessianproduct_gup = rhshessianproductfunc2; 662 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 663 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 664 PetscFunctionReturn(0); 665 } 666 667 /*@C 668 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 669 670 Collective on TS 671 672 Input Parameters: 673 . ts - The TS context obtained from TSCreate() 674 675 Notes: 676 TSComputeRHSHessianProductFunctionUU() is typically used for sensitivity implementation, 677 so most users would not generally call this routine themselves. 
678 679 Level: developer 680 681 .seealso: `TSSetRHSHessianProduct()` 682 @*/ 683 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 684 { 685 PetscFunctionBegin; 686 if (!VHV) PetscFunctionReturn(0); 687 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 688 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 689 690 PetscStackPush("TS user RHSHessianProduct function 1 for sensitivity analysis"); 691 PetscCall((*ts->rhshessianproduct_guu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 692 PetscStackPop; 693 PetscFunctionReturn(0); 694 } 695 696 /*@C 697 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 698 699 Collective on TS 700 701 Input Parameters: 702 . ts - The TS context obtained from TSCreate() 703 704 Notes: 705 TSComputeRHSHessianProductFunctionUP() is typically used for sensitivity implementation, 706 so most users would not generally call this routine themselves. 707 708 Level: developer 709 710 .seealso: `TSSetRHSHessianProduct()` 711 @*/ 712 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 713 { 714 PetscFunctionBegin; 715 if (!VHV) PetscFunctionReturn(0); 716 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 717 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 718 719 PetscStackPush("TS user RHSHessianProduct function 2 for sensitivity analysis"); 720 PetscCall((*ts->rhshessianproduct_gup)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 721 PetscStackPop; 722 PetscFunctionReturn(0); 723 } 724 725 /*@C 726 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 727 728 Collective on TS 729 730 Input Parameters: 731 . ts - The TS context obtained from TSCreate() 732 733 Notes: 734 TSComputeRHSHessianProductFunctionPU() is typically used for sensitivity implementation, 735 so most users would not generally call this routine themselves. 
736 737 Level: developer 738 739 .seealso: `TSSetRHSHessianProduct()` 740 @*/ 741 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 742 { 743 PetscFunctionBegin; 744 if (!VHV) PetscFunctionReturn(0); 745 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 746 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 747 748 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 749 PetscCall((*ts->rhshessianproduct_gpu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 750 PetscStackPop; 751 PetscFunctionReturn(0); 752 } 753 754 /*@C 755 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 756 757 Collective on TS 758 759 Input Parameters: 760 . ts - The TS context obtained from TSCreate() 761 762 Notes: 763 TSComputeRHSHessianProductFunctionPP() is typically used for sensitivity implementation, 764 so most users would not generally call this routine themselves. 765 766 Level: developer 767 768 .seealso: `TSSetRHSHessianProduct()` 769 @*/ 770 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 771 { 772 PetscFunctionBegin; 773 if (!VHV) PetscFunctionReturn(0); 774 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 775 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 776 777 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 778 PetscCall((*ts->rhshessianproduct_gpp)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx)); 779 PetscStackPop; 780 PetscFunctionReturn(0); 781 } 782 783 /* --------------------------- Adjoint sensitivity ---------------------------*/ 784 785 /*@ 786 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 787 for use by the TSAdjoint routines. 788 789 Logically Collective on TS 790 791 Input Parameters: 792 + ts - the TS context obtained from TSCreate() 793 . 
   numcost - number of gradients to be computed, this is the number of cost functions
.  lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
-  mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

   Level: beginner

   Notes:
   the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime  mu_i = df/dp|finaltime

   After TSAdjointSolve() is called the lambda and the mu contain the computed sensitivities

.seealso `TSGetCostGradients()`
@*/
PetscErrorCode TSSetCostGradients(TS ts,PetscInt numcost,Vec *lambda,Vec *mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(lambda,3);
  /* The caller retains ownership of the vectors; only the pointers are stored here */
  ts->vecs_sensi = lambda;
  ts->vecs_sensip = mu;
  /* numcost must agree with any count already recorded (e.g. by TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost = numcost;
  PetscFunctionReturn(0);
}

/*@
   TSGetCostGradients - Returns the gradients from the TSAdjointSolve()

   Not Collective, but Vec returned is parallel if TS is parallel

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameters:
+  numcost - size of returned arrays
.  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
-  mu - vectors containing the gradients of the cost functions with respect to the problem parameters

   Level: intermediate

.seealso: `TSSetCostGradients()`
@*/
PetscErrorCode TSGetCostGradients(TS ts,PetscInt *numcost,Vec **lambda,Vec **mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* each output is optional; pass NULL for any value not needed */
  if (numcost) *numcost = ts->numcost;
  if (lambda) *lambda = ts->vecs_sensi;
  if (mu) *mu = ts->vecs_sensip;
  PetscFunctionReturn(0);
}

/*@
   TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
      for use by the TSAdjoint routines.

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  numcost - number of cost functions
.  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
.  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
-  dir - the direction vector that are multiplied with the Hessian of the cost functions

   Level: beginner

   Notes: Hessian of the cost function is completely different from Hessian of the ODE/DAE system

   For second-order adjoint, one needs to call this function and then TSAdjointSetForward() before TSSolve().

   After TSAdjointSolve() is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver.

   Passing NULL for lambda2 disables the second-order calculation.
.seealso: `TSAdjointSetForward()`
@*/
PetscErrorCode TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec *lambda2,Vec *mu2,Vec dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* numcost must agree with any count already recorded (e.g. by TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numcost,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand");
  ts->numcost = numcost;
  ts->vecs_sensi2 = lambda2;
  ts->vecs_sensi2p = mu2;
  ts->vec_dir = dir;
  PetscFunctionReturn(0);
}

/*@
   TSGetCostHessianProducts - Returns the gradients from the TSAdjointSolve()

   Not Collective, but Vec returned is parallel if TS is parallel

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameters:
+  numcost - number of cost functions
.  lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
.  mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
-  dir - the direction vector that are multiplied with the Hessian of the cost functions

   Level: intermediate

.seealso: `TSSetCostHessianProducts()`
@*/
PetscErrorCode TSGetCostHessianProducts(TS ts,PetscInt *numcost,Vec **lambda2,Vec **mu2, Vec *dir)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* each output is optional; pass NULL for any value not needed */
  if (numcost) *numcost = ts->numcost;
  if (lambda2) *lambda2 = ts->vecs_sensi2;
  if (mu2) *mu2 = ts->vecs_sensi2p;
  if (dir) *dir = ts->vec_dir;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
-  didp - the derivative of initial values w.r.t. parameters

   Level: intermediate

   Notes: When computing sensitivities w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. TSAdjoint does not reset the tangent linear solver automatically, TSAdjointResetForward() should be called to reset the tangent linear solver.

.seealso: `TSSetCostHessianProducts()`, `TSAdjointResetForward()`
@*/
PetscErrorCode TSAdjointSetForward(TS ts,Mat didp)
{
  Mat         A;
  Vec         sp;
  PetscScalar *xarr;
  PetscInt    lsize;

  PetscFunctionBegin;
  ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */
  PetscCheck(ts->vecs_sensi2,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Must call TSSetCostHessianProducts() first");
  PetscCheck(ts->vec_dir,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Directional vector is missing. Call TSSetCostHessianProducts() to set it.");
  /* create a single-column dense matrix */
  PetscCall(VecGetLocalSize(ts->vec_sol,&lsize));
  PetscCall(MatCreateDense(PetscObjectComm((PetscObject)ts),lsize,PETSC_DECIDE,PETSC_DECIDE,1,NULL,&A));

  /* sp is a work vector aliased onto the storage of A's single column, so writes
     through sp fill the matrix directly; the alias is undone before A is used */
  PetscCall(VecDuplicate(ts->vec_sol,&sp));
  PetscCall(MatDenseGetColumn(A,0,&xarr));
  PetscCall(VecPlaceArray(sp,xarr));
  if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */
    if (didp) {
      PetscCall(MatMult(didp,ts->vec_dir,sp));
      PetscCall(VecScale(sp,2.));
    } else {
      PetscCall(VecZeroEntries(sp));
    }
  } else { /* tangent linear variable initialized as dir */
    PetscCall(VecCopy(ts->vec_dir,sp));
  }
  PetscCall(VecResetArray(sp));
  PetscCall(MatDenseRestoreColumn(A,&xarr));
  PetscCall(VecDestroy(&sp));

  PetscCall(TSForwardSetInitialSensitivities(ts,A)); /* if didp is NULL, identity matrix is assumed */

  PetscCall(MatDestroy(&A));
  PetscFunctionReturn(0);
}

/*@
   TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context

   Logically Collective on TS

   Input Parameters:
.  ts - the TS context obtained from TSCreate()

   Level: intermediate

.seealso: `TSAdjointSetForward()`
@*/
PetscErrorCode TSAdjointResetForward(TS ts)
{
  PetscFunctionBegin;
  ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */
  PetscCall(TSForwardReset(ts));
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSetUp - Sets up the internal data structures for the later use
   of an adjoint solver

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: advanced

.seealso: `TSCreate()`, `TSAdjointStep()`, `TSSetCostGradients()`
@*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory tj;
  PetscBool    match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->adjointsetupcalled) PetscFunctionReturn(0);
  PetscCheck(ts->vecs_sensi,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetCostGradients() first");
  PetscCheck(!ts->vecs_sensip || ts->Jacp || ts->Jacprhs,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  /* The adjoint solver replays the forward trajectory backwards, so the basic
     trajectory type must have stored stage values, not just solutions */
  PetscCall(TSGetTrajectory(ts,&tj));
  PetscCall(PetscObjectTypeCompare((PetscObject)tj,TSTRAJECTORYBASIC,&match));
  if (match) {
    PetscBool solution_only;
    PetscCall(TSTrajectoryGetSolutionOnly(tj,&solution_only));
    PetscCheck(!solution_only,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  PetscCall(TSTrajectorySetUseHistory(tj,PETSC_FALSE)); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    PetscCall(VecDuplicate(ts->vecs_sensi[0],&ts->vec_drdu_col));
    if (ts->vecs_sensip) {
      PetscCall(VecDuplicate(ts->vecs_sensip[0],&ts->vec_drdp_col));
    }
  }

  if (ts->ops->adjointsetup) PetscCall((*ts->ops->adjointsetup)(ts));
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointReset - Resets a TSAdjoint context and removes any allocated Vecs and Mats.

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: beginner

.seealso: `TSCreate()`, `TSAdjointSetUp()`, `TSADestroy()`
@*/
PetscErrorCode TSAdjointReset(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->ops->adjointreset) PetscCall((*ts->ops->adjointreset)(ts));
  if (ts->quadraturets) { /* if there is integral in the cost function */
    PetscCall(VecDestroy(&ts->vec_drdu_col));
    if (ts->vecs_sensip) {
      PetscCall(VecDestroy(&ts->vec_drdp_col));
    }
  }
  /* user-owned sensitivity vectors are only unreferenced here, never destroyed */
  ts->vecs_sensi = NULL;
  ts->vecs_sensip = NULL;
  ts->vecs_sensi2 = NULL;
  ts->vecs_sensi2p = NULL;
  ts->vec_dir = NULL;
  ts->adjointsetupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
-  steps - number of steps to use

   Level: intermediate

   Notes:
   Normally one does not call this and TSAdjointSolve() integrates back to the original timestep.
   One can call this
   so as to integrate back to less than the original timestep

.seealso: `TSSetExactFinalTime()`
@*/
PetscErrorCode TSAdjointSetSteps(TS ts,PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidLogicalCollectiveInt(ts,steps,2);
  PetscCheck(steps >= 0,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back a negative number of steps");
  /* cannot rewind past the number of steps the forward solve actually took */
  PetscCheck(steps <= ts->steps,PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointSetRHSJacobian - Deprecated, use TSSetRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID,1);
  PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);

  ts->rhsjacobianp = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    /* take a reference before dropping the old matrix, in case they are the same object */
    PetscCall(PetscObjectReference((PetscObject)Amat));
    PetscCall(MatDestroy(&ts->Jacp));
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointComputeRHSJacobian - Deprecated, use TSComputeRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidPointer(Amat,4);

  PetscStackPush("TS user JacobianP function for sensitivity analysis");
  PetscCall((*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointComputeDRDYFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec *DRDU)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  PetscStackPush("TS user DRDY function for sensitivity analysis");
  PetscCall((*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  PetscStackPush("TS user DRDP function for sensitivity analysis");
  PetscCall((*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx));
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSensi - monitors the first lambda sensitivity

   Level: intermediate

.seealso: `TSAdjointMonitorSet()`
@*/
PetscErrorCode TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf)
{
  PetscViewer viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8);
  PetscCall(PetscViewerPushFormat(viewer,vf->format));
  PetscCall(VecView(lambda[0],viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate for the type indicated by the user

   Collective on TS

   Input Parameters:
+  ts - TS object you wish to monitor
.  name - the monitor type one is seeking
.  help - message indicating what monitoring is done
.  manual - manual page for the monitor
.  monitor - the monitor function
-  monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the TS or PetscViewer objects

   Level: developer

.seealso: `PetscOptionsGetViewer()`, `PetscOptionsGetReal()`, `PetscOptionsHasName()`, `PetscOptionsGetString()`,
          `PetscOptionsGetIntArray()`, `PetscOptionsGetRealArray()`, `PetscOptionsBool()`
          `PetscOptionsInt()`, `PetscOptionsString()`, `PetscOptionsReal()`, `PetscOptionsBool()`,
          `PetscOptionsName()`, `PetscOptionsBegin()`, `PetscOptionsEnd()`, `PetscOptionsHeadBegin()`,
          `PetscOptionsStringArray()`, `PetscOptionsRealArray()`, `PetscOptionsScalar()`,
          `PetscOptionsBoolGroupBegin()`, `PetscOptionsBoolGroup()`, `PetscOptionsBoolGroupEnd()`,
          `PetscOptionsFList()`, `PetscOptionsEList()`
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts,const char name[],const char help[], const char manual[],PetscErrorCode (*monitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,PetscViewerAndFormat*),PetscErrorCode (*monitorsetup)(TS,PetscViewerAndFormat*))
{
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts),((PetscObject) ts)->options,((PetscObject)ts)->prefix,name,&viewer,&format,&flg));
  if (flg) {
    PetscViewerAndFormat *vf;
    /* vf takes over the viewer reference; it is released when the monitor is destroyed */
    PetscCall(PetscViewerAndFormatCreate(viewer,format,&vf));
    PetscCall(PetscObjectDereference((PetscObject)viewer));
    if (monitorsetup) PetscCall((*monitorsetup)(ts,vf));
    PetscCall(TSAdjointMonitorSet(ts,(PetscErrorCode (*)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy));
  }
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
   timestep to display the iteration's progress.

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  adjointmonitor - monitoring routine
.  adjointmctx - [optional] user-defined context for private data for the
                 monitor routine (use NULL if no context is desired)
-  adjointmonitordestroy - [optional] routine that frees monitor context
                           (may be NULL)

   Calling sequence of monitor:
$    int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx)

+    ts - the TS context
.    steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
             been interpolated to)
.    time - current time
.    u - current iterate
.    numcost - number of cost functions
.    lambda - sensitivities to initial conditions
.    mu - sensitivities to parameters
-    adjointmctx - [optional] adjoint monitoring context

   Notes:
   This routine adds an additional monitor to the list of monitors that
   already has been loaded.
1254 1255 Fortran Notes: 1256 Only a single monitor function can be set for each TS object 1257 1258 Level: intermediate 1259 1260 .seealso: `TSAdjointMonitorCancel()` 1261 @*/ 1262 PetscErrorCode TSAdjointMonitorSet(TS ts,PetscErrorCode (*adjointmonitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*),void *adjointmctx,PetscErrorCode (*adjointmdestroy)(void**)) 1263 { 1264 PetscInt i; 1265 PetscBool identical; 1266 1267 PetscFunctionBegin; 1268 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1269 for (i=0; i<ts->numbermonitors;i++) { 1270 PetscCall(PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor,adjointmctx,adjointmdestroy,(PetscErrorCode (*)(void))ts->adjointmonitor[i],ts->adjointmonitorcontext[i],ts->adjointmonitordestroy[i],&identical)); 1271 if (identical) PetscFunctionReturn(0); 1272 } 1273 PetscCheck(ts->numberadjointmonitors < MAXTSMONITORS,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many adjoint monitors set"); 1274 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1275 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1276 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void*)adjointmctx; 1277 PetscFunctionReturn(0); 1278 } 1279 1280 /*@C 1281 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1282 1283 Logically Collective on TS 1284 1285 Input Parameters: 1286 . ts - the TS context obtained from TSCreate() 1287 1288 Notes: 1289 There is no way to remove a single, specific monitor. 

   Level: intermediate

.seealso: `TSAdjointMonitorSet()`
@*/
PetscErrorCode TSAdjointMonitorCancel(TS ts)
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* release each monitor context before forgetting the list */
  for (i=0; i<ts->numberadjointmonitors; i++) {
    if (ts->adjointmonitordestroy[i]) {
      PetscCall((*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]));
    }
  }
  ts->numberadjointmonitors = 0;
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorDefault - the default monitor of adjoint computations

   Level: intermediate

.seealso: `TSAdjointMonitorSet()`
@*/
PetscErrorCode TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf)
{
  PetscViewer viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8);
  PetscCall(PetscViewerPushFormat(viewer,vf->format));
  PetscCall(PetscViewerASCIIAddTab(viewer,((PetscObject)ts)->tablevel));
  /* " (r)" marks a rolled-back step */
  PetscCall(PetscViewerASCIIPrintf(viewer,"%" PetscInt_FMT " TS dt %g time %g%s",step,(double)ts->time_step,(double)ptime,ts->steprollback ? " (r)\n" : "\n"));
  PetscCall(PetscViewerASCIISubtractTab(viewer,((PetscObject)ts)->tablevel));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorDrawSensi - Monitors progress of the adjoint TS solvers by calling
   VecView() for the sensitivities to initial states at each timestep

   Collective on TS

   Input Parameters:
+  ts - the TS context
.  step - current time-step
.  ptime - current time
.  u - current state
.  numcost - number of cost functions
.  lambda - sensitivities to initial conditions
.  mu - sensitivities to parameters
-  dummy - either a viewer or NULL

   Level: intermediate

.seealso: `TSAdjointMonitorSet()`, `TSAdjointMonitorDefault()`, `VecView()`
@*/
PetscErrorCode TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda,Vec *mu,void *dummy)
{
  TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
  PetscDraw        draw;
  PetscReal        xl,yl,xr,yr,h;
  char             time[32];

  PetscFunctionBegin;
  /* draw every howoften-th step, or only at the end when howoften == -1 */
  if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0);

  PetscCall(VecView(lambda[0],ictx->viewer));
  PetscCall(PetscViewerDrawGetDraw(ictx->viewer,0,&draw));
  PetscCall(PetscSNPrintf(time,32,"Timestep %d Time %g",(int)step,(double)ptime));
  PetscCall(PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr));
  h = yl + .95*(yr - yl); /* place the label near the top of the drawing area */
  PetscCall(PetscDrawStringCentered(draw,.5*(xl+xr),h,PETSC_DRAW_BLACK,time));
  PetscCall(PetscDrawFlush(draw));
  PetscFunctionReturn(0);
}

/*
   TSAdjointSetFromOptions - Sets various TSAdjoint parameters from user options.

   Collective on TSAdjoint

   Input Parameter:
.  ts - the TS context

   Options Database Keys:
+  -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory)
.  -ts_adjoint_monitor - print information at each adjoint time step
-  -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically

   Level: developer

   Notes:
   This is not normally called directly by users

.seealso: `TSSetSaveTrajectory()`, `TSTrajectorySetUp()`
*/
PetscErrorCode TSAdjointSetFromOptions(PetscOptionItems *PetscOptionsObject,TS ts)
{
  PetscBool tflg,opt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,2);
  PetscOptionsHeadBegin(PetscOptionsObject,"TS Adjoint options");
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsBool("-ts_adjoint_solve","Solve the adjoint problem immediately after solving the forward problem","",tflg,&tflg,&opt));
  if (opt) {
    /* the adjoint needs the forward trajectory, so saving is forced on */
    PetscCall(TSSetSaveTrajectory(ts));
    ts->adjoint_solve = tflg;
  }
  PetscCall(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor","Monitor adjoint timestep size","TSAdjointMonitorDefault",TSAdjointMonitorDefault,NULL));
  PetscCall(TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor_sensi","Monitor sensitivity in the adjoint computation","TSAdjointMonitorSensi",TSAdjointMonitorSensi,NULL));
  opt = PETSC_FALSE;
  PetscCall(PetscOptionsName("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",&opt));
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1;

    PetscCall(PetscOptionsInt("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",howoften,&howoften,NULL));
    PetscCall(TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&ctx));
    PetscCall(TSAdjointMonitorSet(ts,TSAdjointMonitorDrawSensi,ctx,(PetscErrorCode (*)(void**))TSMonitorDrawCtxDestroy));
  }
  PetscFunctionReturn(0);
}

/*@
   TSAdjointStep - Steps one time step backward in the adjoint run

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: intermediate

.seealso: `TSAdjointSetUp()`, `TSAdjointSolve()`
@*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM dm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscCall(TSGetDM(ts,&dm));
  PetscCall(TSAdjointSetUp(ts));
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  PetscCheck(ts->ops->adjointstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSStep has failed because the adjoint of %s has not been implemented, try other time stepping methods for adjoint sensitivity analysis",((PetscObject)ts)->type_name);
  PetscCall(PetscLogEventBegin(TS_AdjointStep,ts,0,0,0));
  PetscCall((*ts->ops->adjointstep)(ts));
  PetscCall(PetscLogEventEnd(TS_AdjointStep,ts,0,0,0));
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    PetscCheck(!ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSAdjointStep has failed due to %s",TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* declare convergence once the requested number of backward steps is reached */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Options Database:
.  -ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values

   Level: intermediate

   Notes:
   This must be called after a call to TSSolve() that solves the forward problem

   By default this will integrate back to the initial time, one can use TSAdjointSetSteps() to step back to a later time

.seealso: `TSCreate()`, `TSSetCostGradients()`, `TSSetSolution()`, `TSAdjointStep()`
@*/
PetscErrorCode TSAdjointSolve(TS ts)
{
  static PetscBool cite = PETSC_FALSE;
#if defined(TSADJOINT_STAGE)
  PetscLogStage adjoint_stage;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscCall(PetscCitationsRegister("@article{Zhang2022tsadjoint,\n"
                                   " title = {{PETSc TSAdjoint: A Discrete Adjoint ODE Solver for First-Order and Second-Order Sensitivity Analysis}},\n"
                                   " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n"
                                   " journal = {SIAM Journal on Scientific Computing},\n"
                                   " volume = {44},\n"
                                   " number = {1},\n"
                                   " pages = {C1-C24},\n"
                                   " doi = {10.1137/21M140078X},\n"
                                   " year = {2022}\n}\n",&cite));
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStageRegister("TSAdjoint",&adjoint_stage));
  PetscCall(PetscLogStagePush(adjoint_stage));
#endif
  PetscCall(TSAdjointSetUp(ts));

  /* reset time step and iteration counters */
  ts->adjoint_steps = 0;
  ts->ksp_its = 0;
  ts->snes_its = 0;
  ts->num_snes_failures = 0;
  ts->reject = 0;
  ts->reason = TS_CONVERGED_ITERATING;

  /* by default rewind all the way through the forward steps */
  if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps;
  if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;

  while (!ts->reason) {
    /* restore the forward solution at the current step index, then step backward */
    PetscCall(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime));
    PetscCall(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip));
    PetscCall(TSAdjointEventHandler(ts));
    PetscCall(TSAdjointStep(ts));
    if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) {
      PetscCall(TSAdjointCostIntegral(ts));
    }
  }
  if (!ts->steps) {
    /* reached the initial time; monitor once more with the initial state */
    PetscCall(TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime));
    PetscCall(TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip));
  }
  ts->solvetime = ts->ptime;
  PetscCall(TSTrajectoryViewFromOptions(ts->trajectory,NULL,"-ts_trajectory_view"));
  PetscCall(VecViewFromOptions(ts->vecs_sensi[0],(PetscObject) ts, "-ts_adjoint_view_solution"));
  ts->adjoint_max_steps = 0;
#if defined(TSADJOINT_STAGE)
  PetscCall(PetscLogStagePop());
#endif
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using TSAdjointMonitorSet()

   Collective on TS

   Input Parameters:
+  ts - time stepping context obtained from TSCreate()
.  step - step number that has just completed
.  ptime - model time of the state
.  u - state at the current model time
.  numcost - number of cost functions (dimension of lambda or mu)
.  lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
-  mu - vectors containing the gradients of the cost functions with respect to the problem parameters

   Notes:
   TSAdjointMonitor() is typically used automatically within the time stepping implementations.
   Users would almost never call this routine directly.

   Level: developer

@*/
PetscErrorCode TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda, Vec *mu)
{
  PetscInt i,n = ts->numberadjointmonitors;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(u,VEC_CLASSID,4);
  /* monitors may only read the state, never modify it */
  PetscCall(VecLockReadPush(u));
  for (i=0; i<n; i++) {
    PetscCall((*ts->adjointmonitor[i])(ts,step,ptime,u,numcost,lambda,mu,ts->adjointmonitorcontext[i]));
  }
  PetscCall(VecLockReadPop(u));
  PetscFunctionReturn(0);
}

/*@
   TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run.

   Collective on TS

   Input Parameter:
.  ts - time stepping context

   Level: advanced

   Notes:
   This function cannot be called until TSAdjointStep() has been completed.

.seealso: `TSAdjointSolve()`, `TSAdjointStep`
@*/
PetscErrorCode TSAdjointCostIntegral(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscCheck(ts->ops->adjointintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the adjoint run",((PetscObject)ts)->type_name);
  PetscCall((*ts->ops->adjointintegral)(ts));
  PetscFunctionReturn(0);
}

/* ------------------ Forward (tangent linear) sensitivity ------------------*/

/*@
   TSForwardSetUp - Sets up the internal data structures for the later use
   of forward sensitivity analysis

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: advanced

.seealso: `TSCreate()`, `TSDestroy()`, `TSSetUp()`
@*/
PetscErrorCode TSForwardSetUp(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->forwardsetupcalled) PetscFunctionReturn(0);
  if (ts->ops->forwardsetup) PetscCall((*ts->ops->forwardsetup)(ts));
  /* work vector used to extract one column of the sensitivity matrix at a time */
  PetscCall(VecDuplicate(ts->vec_sol,&ts->vec_sensip_col));
  ts->forwardsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   TSForwardReset - Reset the internal data structures used by forward sensitivity analysis

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: advanced

.seealso: `TSCreate()`, `TSDestroy()`, `TSForwardSetUp()`
@*/
PetscErrorCode TSForwardReset(TS ts)
{
  TS quadts = ts->quadraturets;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->ops->forwardreset) PetscCall((*ts->ops->forwardreset)(ts));
  PetscCall(MatDestroy(&ts->mat_sensip));
  if (quadts) {
    PetscCall(MatDestroy(&quadts->mat_sensip));
  }
  PetscCall(VecDestroy(&ts->vec_sensip_col));
  ts->forward_solve = PETSC_FALSE;
  ts->forwardsetupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term.

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  numfwdint - number of integrals
-  vp - the vectors containing the gradients for each integral w.r.t. parameters

   Level: deprecated

.seealso: `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec *vp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* numfwdint must agree with any count already recorded (e.g. by TSSetCostIntegrand()) */
  PetscCheck(!ts->numcost || ts->numcost == numfwdint,PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()");
  if (!ts->numcost) ts->numcost = numfwdint;

  ts->vecs_integral_sensip = vp;
  PetscFunctionReturn(0);
}

/*@
   TSForwardGetIntegralGradients - Returns the forward sensitivities of the integral term.

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameter:
.  vp - the vectors containing the gradients for each integral w.r.t. parameters

   Level: deprecated

.seealso: `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetIntegralGradients(TS ts,PetscInt *numfwdint,Vec **vp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(vp,3);
  if (numfwdint) *numfwdint = ts->numcost;
  /* vp was validated non-NULL above; the guard is kept for symmetry with numfwdint */
  if (vp) *vp = ts->vecs_integral_sensip;
  PetscFunctionReturn(0);
}

/*@
   TSForwardStep - Compute the forward sensitivity for one time step.

   Collective on TS

   Input Parameter:
.  ts - time stepping context

   Level: advanced

   Notes:
   This function cannot be called until TSStep() has been completed.

.seealso: `TSForwardSetSensitivities()`, `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardSetUp()`
@*/
PetscErrorCode TSForwardStep(TS ts)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscCheck(ts->ops->forwardstep,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide forward sensitivity analysis",((PetscObject)ts)->type_name);
  PetscCall(PetscLogEventBegin(TS_ForwardStep,ts,0,0,0));
  PetscCall((*ts->ops->forwardstep)(ts));
  PetscCall(PetscLogEventEnd(TS_ForwardStep,ts,0,0,0));
  PetscCheck(ts->reason >= 0 || !ts->errorifstepfailed,PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSFowardStep has failed due to %s",TSConvergedReasons[ts->reason]);
  PetscFunctionReturn(0);
}

/*@
   TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values.

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  nump - number of parameters
-  Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

   Level: beginner

   Notes:
   Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems.
   This function turns on a flag to trigger TSSolve() to compute forward sensitivities automatically.
   You must call this function before TSSolve().
   The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime.

.seealso: `TSForwardGetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(Smat,MAT_CLASSID,3);
  ts->forward_solve = PETSC_TRUE;
  /* PETSC_DEFAULT lets the column count of Smat determine the number of parameters */
  if (nump == PETSC_DEFAULT) {
    PetscCall(MatGetSize(Smat,NULL,&ts->num_parameters));
  } else ts->num_parameters = nump;
  /* take a reference before dropping the old matrix, in case they are the same object */
  PetscCall(PetscObjectReference((PetscObject)Smat));
  PetscCall(MatDestroy(&ts->mat_sensip));
  ts->mat_sensip = Smat;
  PetscFunctionReturn(0);
}

/*@
   TSForwardGetSensitivities - Returns the trajectory sensitivities

   Not Collective, but Vec returned is parallel if TS is parallel

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Output Parameters:
+  nump - number of parameters
-  Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

   Level: intermediate

.seealso: `TSForwardSetSensitivities()`, `TSForwardSetIntegralGradients()`, `TSForwardGetIntegralGradients()`, `TSForwardStep()`
@*/
PetscErrorCode TSForwardGetSensitivities(TS ts,PetscInt *nump,Mat *Smat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (nump) *nump = ts->num_parameters;
  if (Smat) *Smat = ts->mat_sensip;
  PetscFunctionReturn(0);
}

/*@
   TSForwardCostIntegral - Evaluate the cost integral in the forward run.

   Collective on TS

   Input Parameter:
.  ts - time stepping context

   Level: advanced

   Notes:
   This function cannot be called until TSStep() has been completed.
1793 1794 .seealso: `TSSolve()`, `TSAdjointCostIntegral()` 1795 @*/ 1796 PetscErrorCode TSForwardCostIntegral(TS ts) 1797 { 1798 PetscFunctionBegin; 1799 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1800 PetscCheck(ts->ops->forwardintegral,PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the forward run",((PetscObject)ts)->type_name); 1801 PetscCall((*ts->ops->forwardintegral)(ts)); 1802 PetscFunctionReturn(0); 1803 } 1804 1805 /*@ 1806 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1807 1808 Collective on TS 1809 1810 Input Parameters: 1811 + ts - the TS context obtained from TSCreate() 1812 - didp - parametric sensitivities of the initial condition 1813 1814 Level: intermediate 1815 1816 Notes: TSSolve() allows users to pass the initial solution directly to TS. But the tangent linear variables cannot be initialized in this way. This function is used to set initial values for tangent linear variables. 1817 1818 .seealso: `TSForwardSetSensitivities()` 1819 @*/ 1820 PetscErrorCode TSForwardSetInitialSensitivities(TS ts,Mat didp) 1821 { 1822 PetscFunctionBegin; 1823 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1824 PetscValidHeaderSpecific(didp,MAT_CLASSID,2); 1825 if (!ts->mat_sensip) { 1826 PetscCall(TSForwardSetSensitivities(ts,PETSC_DEFAULT,didp)); 1827 } 1828 PetscFunctionReturn(0); 1829 } 1830 1831 /*@ 1832 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1833 1834 Input Parameter: 1835 . 
ts - the TS context obtained from TSCreate() 1836 1837 Output Parameters: 1838 + ns - number of stages 1839 - S - tangent linear sensitivities at the intermediate stages 1840 1841 Level: advanced 1842 1843 @*/ 1844 PetscErrorCode TSForwardGetStages(TS ts,PetscInt *ns,Mat **S) 1845 { 1846 PetscFunctionBegin; 1847 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1848 1849 if (!ts->ops->getstages) *S=NULL; 1850 else { 1851 PetscCall((*ts->ops->forwardgetstages)(ts,ns,S)); 1852 } 1853 PetscFunctionReturn(0); 1854 } 1855 1856 /*@ 1857 TSCreateQuadratureTS - Create a sub-TS that evaluates integrals over time 1858 1859 Input Parameters: 1860 + ts - the TS context obtained from TSCreate() 1861 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1862 1863 Output Parameters: 1864 . quadts - the child TS context 1865 1866 Level: intermediate 1867 1868 .seealso: `TSGetQuadratureTS()` 1869 @*/ 1870 PetscErrorCode TSCreateQuadratureTS(TS ts,PetscBool fwd,TS *quadts) 1871 { 1872 char prefix[128]; 1873 1874 PetscFunctionBegin; 1875 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1876 PetscValidPointer(quadts,3); 1877 PetscCall(TSDestroy(&ts->quadraturets)); 1878 PetscCall(TSCreate(PetscObjectComm((PetscObject)ts),&ts->quadraturets)); 1879 PetscCall(PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets,(PetscObject)ts,1)); 1880 PetscCall(PetscLogObjectParent((PetscObject)ts,(PetscObject)ts->quadraturets)); 1881 PetscCall(PetscSNPrintf(prefix,sizeof(prefix),"%squad_",((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : "")); 1882 PetscCall(TSSetOptionsPrefix(ts->quadraturets,prefix)); 1883 *quadts = ts->quadraturets; 1884 1885 if (ts->numcost) { 1886 PetscCall(VecCreateSeq(PETSC_COMM_SELF,ts->numcost,&(*quadts)->vec_sol)); 1887 } else { 1888 PetscCall(VecCreateSeq(PETSC_COMM_SELF,1,&(*quadts)->vec_sol)); 1889 } 1890 ts->costintegralfwd = fwd; 1891 PetscFunctionReturn(0); 1892 } 1893 1894 /*@ 1895 TSGetQuadratureTS - Return the sub-TS that evaluates integrals over time 1896 1897 Input Parameter: 1898 . ts - the TS context obtained from TSCreate() 1899 1900 Output Parameters: 1901 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1902 - quadts - the child TS context 1903 1904 Level: intermediate 1905 1906 .seealso: `TSCreateQuadratureTS()` 1907 @*/ 1908 PetscErrorCode TSGetQuadratureTS(TS ts,PetscBool *fwd,TS *quadts) 1909 { 1910 PetscFunctionBegin; 1911 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1912 if (fwd) *fwd = ts->costintegralfwd; 1913 if (quadts) *quadts = ts->quadraturets; 1914 PetscFunctionReturn(0); 1915 } 1916 1917 /*@ 1918 TSComputeSNESJacobian - Compute the SNESJacobian 1919 1920 Input Parameters: 1921 + ts - the TS context obtained from TSCreate() 1922 - x - state vector 1923 1924 Output Parameters: 1925 + J - Jacobian matrix 1926 - Jpre - preconditioning matrix for J (may be same as J) 1927 1928 Level: developer 1929 1930 Notes: 1931 Using SNES to compute the Jacobian enables finite differencing when TS Jacobian is not available. 1932 @*/ 1933 PetscErrorCode TSComputeSNESJacobian(TS ts,Vec x,Mat J,Mat Jpre) 1934 { 1935 SNES snes = ts->snes; 1936 PetscErrorCode (*jac)(SNES,Vec,Mat,Mat,void*) = NULL; 1937 1938 PetscFunctionBegin; 1939 /* 1940 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 1941 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 1942 explicit methods. 
Instead, we check the Jacobian compute function directly to determin if FD 1943 coloring is used. 1944 */ 1945 PetscCall(SNESGetJacobian(snes,NULL,NULL,&jac,NULL)); 1946 if (jac == SNESComputeJacobianDefaultColor) { 1947 Vec f; 1948 PetscCall(SNESSetSolution(snes,x)); 1949 PetscCall(SNESGetFunction(snes,&f,NULL,NULL)); 1950 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 1951 PetscCall(SNESComputeFunction(snes,x,f)); 1952 } 1953 PetscCall(SNESComputeJacobian(snes,x,J,Jpre)); 1954 PetscFunctionReturn(0); 1955 } 1956