1 #include <petsc/private/tsimpl.h> /*I "petscts.h" I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep,TS_ForwardStep,TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective on TS 14 15 Input Parameters: 16 + ts - TS context obtained from TSCreate() 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Calling sequence of func: 22 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 23 + t - current timestep 24 . U - input vector (current ODE solution) 25 . A - output matrix 26 - ctx - [optional] user-defined function context 27 28 Level: intermediate 29 30 Notes: 31 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 32 33 .seealso: TSGetRHSJacobianP() 34 @*/ 35 PetscErrorCode TSSetRHSJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 36 { 37 PetscErrorCode ierr; 38 39 PetscFunctionBegin; 40 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 41 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 42 43 ts->rhsjacobianp = func; 44 ts->rhsjacobianpctx = ctx; 45 if (Amat) { 46 ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr); 47 ierr = MatDestroy(&ts->Jacprhs);CHKERRQ(ierr); 48 ts->Jacprhs = Amat; 49 } 50 PetscFunctionReturn(0); 51 } 52 53 /*@C 54 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 55 56 Logically Collective on TS 57 58 Input Parameter: 59 . ts - TS context obtained from TSCreate() 60 61 Output Parameters: 62 + Amat - JacobianP matrix 63 . 
func - function 64 - ctx - [optional] user-defined function context 65 66 Calling sequence of func: 67 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 68 + t - current timestep 69 . U - input vector (current ODE solution) 70 . A - output matrix 71 - ctx - [optional] user-defined function context 72 73 Level: intermediate 74 75 Notes: 76 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 77 78 .seealso: TSSetRHSJacobianP() 79 @*/ 80 PetscErrorCode TSGetRHSJacobianP(TS ts,Mat *Amat,PetscErrorCode (**func)(TS,PetscReal,Vec,Mat,void*),void **ctx) 81 { 82 PetscFunctionBegin; 83 if (func) *func = ts->rhsjacobianp; 84 if (ctx) *ctx = ts->rhsjacobianpctx; 85 if (Amat) *Amat = ts->Jacprhs; 86 PetscFunctionReturn(0); 87 } 88 89 /*@C 90 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 91 92 Collective on TS 93 94 Input Parameters: 95 . ts - The TS context obtained from TSCreate() 96 97 Level: developer 98 99 .seealso: TSSetRHSJacobianP() 100 @*/ 101 PetscErrorCode TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat) 102 { 103 PetscErrorCode ierr; 104 105 PetscFunctionBegin; 106 if (!Amat) PetscFunctionReturn(0); 107 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 108 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 109 110 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 111 ierr = (*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx);CHKERRQ(ierr); 112 PetscStackPop; 113 PetscFunctionReturn(0); 114 } 115 116 /*@C 117 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 118 119 Logically Collective on TS 120 121 Input Parameters: 122 + ts - TS context obtained from TSCreate() 123 . Amat - JacobianP matrix 124 . 
func - function 125 - ctx - [optional] user-defined function context 126 127 Calling sequence of func: 128 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 129 + t - current timestep 130 . U - input vector (current ODE solution) 131 . Udot - time derivative of state vector 132 . shift - shift to apply, see note below 133 . A - output matrix 134 - ctx - [optional] user-defined function context 135 136 Level: intermediate 137 138 Notes: 139 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 140 141 .seealso: 142 @*/ 143 PetscErrorCode TSSetIJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Vec,PetscReal,Mat,void*),void *ctx) 144 { 145 PetscErrorCode ierr; 146 147 PetscFunctionBegin; 148 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 149 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 150 151 ts->ijacobianp = func; 152 ts->ijacobianpctx = ctx; 153 if (Amat) { 154 ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr); 155 ierr = MatDestroy(&ts->Jacp);CHKERRQ(ierr); 156 ts->Jacp = Amat; 157 } 158 PetscFunctionReturn(0); 159 } 160 161 /*@C 162 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 163 164 Collective on TS 165 166 Input Parameters: 167 + ts - the TS context 168 . t - current timestep 169 . U - state vector 170 . Udot - time derivative of state vector 171 . shift - shift to apply, see note below 172 - imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate 173 174 Output Parameters: 175 . 
A - Jacobian matrix 176 177 Level: developer 178 179 .seealso: TSSetIJacobianP() 180 @*/ 181 PetscErrorCode TSComputeIJacobianP(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat Amat,PetscBool imex) 182 { 183 PetscErrorCode ierr; 184 185 PetscFunctionBegin; 186 if (!Amat) PetscFunctionReturn(0); 187 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 188 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 189 PetscValidHeaderSpecific(Udot,VEC_CLASSID,4); 190 191 ierr = PetscLogEventBegin(TS_JacobianPEval,ts,U,Amat,0);CHKERRQ(ierr); 192 if (ts->ijacobianp) { 193 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 194 ierr = (*ts->ijacobianp)(ts,t,U,Udot,shift,Amat,ts->ijacobianpctx);CHKERRQ(ierr); 195 PetscStackPop; 196 } 197 if (imex) { 198 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 199 PetscBool assembled; 200 ierr = MatZeroEntries(Amat);CHKERRQ(ierr); 201 ierr = MatAssembled(Amat,&assembled);CHKERRQ(ierr); 202 if (!assembled) { 203 ierr = MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 204 ierr = MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 205 } 206 } 207 } else { 208 if (ts->rhsjacobianp) { 209 ierr = TSComputeRHSJacobianP(ts,t,U,ts->Jacprhs);CHKERRQ(ierr); 210 } 211 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 212 ierr = MatScale(Amat,-1);CHKERRQ(ierr); 213 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 214 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 215 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 216 ierr = MatZeroEntries(Amat);CHKERRQ(ierr); 217 } 218 ierr = MatAXPY(Amat,-1,ts->Jacprhs,axpy);CHKERRQ(ierr); 219 } 220 } 221 ierr = PetscLogEventEnd(TS_JacobianPEval,ts,U,Amat,0);CHKERRQ(ierr); 222 PetscFunctionReturn(0); 223 } 224 225 /*@C 226 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 227 228 Logically Collective on TS 229 230 Input Parameters: 231 + ts - the TS 
context obtained from TSCreate() 232 . numcost - number of gradients to be computed, this is the number of cost functions 233 . costintegral - vector that stores the integral values 234 . rf - routine for evaluating the integrand function 235 . drduf - function that computes the gradients of the r's with respect to u 236 . drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL) 237 . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 238 - ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL) 239 240 Calling sequence of rf: 241 $ PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx); 242 243 Calling sequence of drduf: 244 $ PetscErroCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx); 245 246 Calling sequence of drdpf: 247 $ PetscErroCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx); 248 249 Level: deprecated 250 251 Notes: 252 For optimization there is usually a single cost function (numcost = 1). 
For sensitivities there may be multiple cost functions 253 254 .seealso: TSSetRHSJacobianP(), TSGetCostGradients(), TSSetCostGradients() 255 @*/ 256 PetscErrorCode TSSetCostIntegrand(TS ts,PetscInt numcost,Vec costintegral,PetscErrorCode (*rf)(TS,PetscReal,Vec,Vec,void*), 257 PetscErrorCode (*drduf)(TS,PetscReal,Vec,Vec*,void*), 258 PetscErrorCode (*drdpf)(TS,PetscReal,Vec,Vec*,void*), 259 PetscBool fwd,void *ctx) 260 { 261 PetscErrorCode ierr; 262 263 PetscFunctionBegin; 264 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 265 if (costintegral) PetscValidHeaderSpecific(costintegral,VEC_CLASSID,3); 266 if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()"); 267 if (!ts->numcost) ts->numcost=numcost; 268 269 if (costintegral) { 270 ierr = PetscObjectReference((PetscObject)costintegral);CHKERRQ(ierr); 271 ierr = VecDestroy(&ts->vec_costintegral);CHKERRQ(ierr); 272 ts->vec_costintegral = costintegral; 273 } else { 274 if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */ 275 ierr = VecCreateSeq(PETSC_COMM_SELF,numcost,&ts->vec_costintegral);CHKERRQ(ierr); 276 } else { 277 ierr = VecSet(ts->vec_costintegral,0.0);CHKERRQ(ierr); 278 } 279 } 280 if (!ts->vec_costintegrand) { 281 ierr = VecDuplicate(ts->vec_costintegral,&ts->vec_costintegrand);CHKERRQ(ierr); 282 } else { 283 ierr = VecSet(ts->vec_costintegrand,0.0);CHKERRQ(ierr); 284 } 285 ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */ 286 ts->costintegrand = rf; 287 ts->costintegrandctx = ctx; 288 ts->drdufunction = drduf; 289 ts->drdpfunction = drdpf; 290 PetscFunctionReturn(0); 291 } 292 293 /*@C 294 TSGetCostIntegral - Returns the values of the integral term in the cost functions. 295 It is valid to call the routine after a backward run. 
296 297 Not Collective 298 299 Input Parameter: 300 . ts - the TS context obtained from TSCreate() 301 302 Output Parameter: 303 . v - the vector containing the integrals for each cost function 304 305 Level: intermediate 306 307 .seealso: TSSetCostIntegrand() 308 309 @*/ 310 PetscErrorCode TSGetCostIntegral(TS ts,Vec *v) 311 { 312 TS quadts; 313 PetscErrorCode ierr; 314 315 PetscFunctionBegin; 316 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 317 PetscValidPointer(v,2); 318 ierr = TSGetQuadratureTS(ts,NULL,&quadts);CHKERRQ(ierr); 319 *v = quadts->vec_sol; 320 PetscFunctionReturn(0); 321 } 322 323 /*@C 324 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 325 326 Input Parameters: 327 + ts - the TS context 328 . t - current time 329 - U - state vector, i.e. current solution 330 331 Output Parameter: 332 . Q - vector of size numcost to hold the outputs 333 334 Notes: 335 Most users should not need to explicitly call this routine, as it 336 is used internally within the sensitivity analysis context. 
337 338 Level: deprecated 339 340 .seealso: TSSetCostIntegrand() 341 @*/ 342 PetscErrorCode TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q) 343 { 344 PetscErrorCode ierr; 345 346 PetscFunctionBegin; 347 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 348 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 349 PetscValidHeaderSpecific(Q,VEC_CLASSID,4); 350 351 ierr = PetscLogEventBegin(TS_FunctionEval,ts,U,Q,0);CHKERRQ(ierr); 352 if (ts->costintegrand) { 353 PetscStackPush("TS user integrand in the cost function"); 354 ierr = (*ts->costintegrand)(ts,t,U,Q,ts->costintegrandctx);CHKERRQ(ierr); 355 PetscStackPop; 356 } else { 357 ierr = VecZeroEntries(Q);CHKERRQ(ierr); 358 } 359 360 ierr = PetscLogEventEnd(TS_FunctionEval,ts,U,Q,0);CHKERRQ(ierr); 361 PetscFunctionReturn(0); 362 } 363 364 /*@C 365 TSComputeDRDUFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian() 366 367 Level: deprecated 368 369 @*/ 370 PetscErrorCode TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec *DRDU) 371 { 372 PetscErrorCode ierr; 373 374 PetscFunctionBegin; 375 if (!DRDU) PetscFunctionReturn(0); 376 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 377 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 378 379 PetscStackPush("TS user DRDU function for sensitivity analysis"); 380 ierr = (*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx);CHKERRQ(ierr); 381 PetscStackPop; 382 PetscFunctionReturn(0); 383 } 384 385 /*@C 386 TSComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP() 387 388 Level: deprecated 389 390 @*/ 391 PetscErrorCode TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP) 392 { 393 PetscErrorCode ierr; 394 395 PetscFunctionBegin; 396 if (!DRDP) PetscFunctionReturn(0); 397 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 398 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 399 400 PetscStackPush("TS user DRDP function for sensitivity analysis"); 401 ierr = (*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx);CHKERRQ(ierr); 402 PetscStackPop; 403 
PetscFunctionReturn(0); 404 } 405 406 /*@C 407 TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 408 409 Logically Collective on TS 410 411 Input Parameters: 412 + ts - TS context obtained from TSCreate() 413 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 414 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 415 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 416 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 417 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 418 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 419 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 420 - hessianproductfunc4 - vector-Hessian-vector product function for F_PP 421 422 Calling sequence of ihessianproductfunc: 423 $ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 424 + t - current timestep 425 . U - input vector (current ODE solution) 426 . Vl - an array of input vectors to be left-multiplied with the Hessian 427 . Vr - input vector to be right-multiplied with the Hessian 428 . VHV - an array of output vectors for vector-Hessian-vector product 429 - ctx - [optional] user-defined function context 430 431 Level: intermediate 432 433 Notes: 434 The first Hessian function and the working array are required. 435 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 436 $ Vl_n^T*F_UP*Vr 437 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 
438 Each entry of F_UP corresponds to the derivative 439 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 440 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being 441 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]} 442 If the cost function is a scalar, there will be only one vector in Vl and VHV. 443 444 .seealso: 445 @*/ 446 PetscErrorCode TSSetIHessianProduct(TS ts,Vec *ihp1,PetscErrorCode (*ihessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 447 Vec *ihp2,PetscErrorCode (*ihessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 448 Vec *ihp3,PetscErrorCode (*ihessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 449 Vec *ihp4,PetscErrorCode (*ihessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 450 void *ctx) 451 { 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 454 PetscValidPointer(ihp1,2); 455 456 ts->ihessianproductctx = ctx; 457 if (ihp1) ts->vecs_fuu = ihp1; 458 if (ihp2) ts->vecs_fup = ihp2; 459 if (ihp3) ts->vecs_fpu = ihp3; 460 if (ihp4) ts->vecs_fpp = ihp4; 461 ts->ihessianproduct_fuu = ihessianproductfunc1; 462 ts->ihessianproduct_fup = ihessianproductfunc2; 463 ts->ihessianproduct_fpu = ihessianproductfunc3; 464 ts->ihessianproduct_fpp = ihessianproductfunc4; 465 PetscFunctionReturn(0); 466 } 467 468 /*@C 469 TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. 470 471 Collective on TS 472 473 Input Parameters: 474 . ts - The TS context obtained from TSCreate() 475 476 Notes: 477 TSComputeIHessianProductFunctionUU() is typically used for sensitivity implementation, 478 so most users would not generally call this routine themselves. 
479 480 Level: developer 481 482 .seealso: TSSetIHessianProduct() 483 @*/ 484 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 485 { 486 PetscErrorCode ierr; 487 488 PetscFunctionBegin; 489 if (!VHV) PetscFunctionReturn(0); 490 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 491 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 492 493 if (ts->ihessianproduct_fuu) { 494 PetscStackPush("TS user IHessianProduct function 1 for sensitivity analysis"); 495 ierr = (*ts->ihessianproduct_fuu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 496 PetscStackPop; 497 } 498 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 499 if (ts->rhshessianproduct_guu) { 500 PetscInt nadj; 501 ierr = TSComputeRHSHessianProductFunctionUU(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 502 for (nadj=0; nadj<ts->numcost; nadj++) { 503 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 504 } 505 } 506 PetscFunctionReturn(0); 507 } 508 509 /*@C 510 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 511 512 Collective on TS 513 514 Input Parameters: 515 . ts - The TS context obtained from TSCreate() 516 517 Notes: 518 TSComputeIHessianProductFunctionUP() is typically used for sensitivity implementation, 519 so most users would not generally call this routine themselves. 
520 521 Level: developer 522 523 .seealso: TSSetIHessianProduct() 524 @*/ 525 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 526 { 527 PetscErrorCode ierr; 528 529 PetscFunctionBegin; 530 if (!VHV) PetscFunctionReturn(0); 531 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 532 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 533 534 if (ts->ihessianproduct_fup) { 535 PetscStackPush("TS user IHessianProduct function 2 for sensitivity analysis"); 536 ierr = (*ts->ihessianproduct_fup)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 537 PetscStackPop; 538 } 539 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 540 if (ts->rhshessianproduct_gup) { 541 PetscInt nadj; 542 ierr = TSComputeRHSHessianProductFunctionUP(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 543 for (nadj=0; nadj<ts->numcost; nadj++) { 544 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 545 } 546 } 547 PetscFunctionReturn(0); 548 } 549 550 /*@C 551 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 552 553 Collective on TS 554 555 Input Parameters: 556 . ts - The TS context obtained from TSCreate() 557 558 Notes: 559 TSComputeIHessianProductFunctionPU() is typically used for sensitivity implementation, 560 so most users would not generally call this routine themselves. 
561 562 Level: developer 563 564 .seealso: TSSetIHessianProduct() 565 @*/ 566 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 567 { 568 PetscErrorCode ierr; 569 570 PetscFunctionBegin; 571 if (!VHV) PetscFunctionReturn(0); 572 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 573 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 574 575 if (ts->ihessianproduct_fpu) { 576 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 577 ierr = (*ts->ihessianproduct_fpu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 578 PetscStackPop; 579 } 580 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 581 if (ts->rhshessianproduct_gpu) { 582 PetscInt nadj; 583 ierr = TSComputeRHSHessianProductFunctionPU(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 584 for (nadj=0; nadj<ts->numcost; nadj++) { 585 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 586 } 587 } 588 PetscFunctionReturn(0); 589 } 590 591 /*@C 592 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 593 594 Collective on TS 595 596 Input Parameters: 597 . ts - The TS context obtained from TSCreate() 598 599 Notes: 600 TSComputeIHessianProductFunctionPP() is typically used for sensitivity implementation, 601 so most users would not generally call this routine themselves. 
602 603 Level: developer 604 605 .seealso: TSSetIHessianProduct() 606 @*/ 607 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 608 { 609 PetscErrorCode ierr; 610 611 PetscFunctionBegin; 612 if (!VHV) PetscFunctionReturn(0); 613 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 614 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 615 616 if (ts->ihessianproduct_fpp) { 617 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 618 ierr = (*ts->ihessianproduct_fpp)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 619 PetscStackPop; 620 } 621 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 622 if (ts->rhshessianproduct_gpp) { 623 PetscInt nadj; 624 ierr = TSComputeRHSHessianProductFunctionPP(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 625 for (nadj=0; nadj<ts->numcost; nadj++) { 626 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 627 } 628 } 629 PetscFunctionReturn(0); 630 } 631 632 /*@C 633 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 634 635 Logically Collective on TS 636 637 Input Parameters: 638 + ts - TS context obtained from TSCreate() 639 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 640 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 641 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 642 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 643 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 644 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 645 . 
rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 646 - hessianproductfunc4 - vector-Hessian-vector product function for G_PP 647 648 Calling sequence of ihessianproductfunc: 649 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 650 + t - current timestep 651 . U - input vector (current ODE solution) 652 . Vl - an array of input vectors to be left-multiplied with the Hessian 653 . Vr - input vector to be right-multiplied with the Hessian 654 . VHV - an array of output vectors for vector-Hessian-vector product 655 - ctx - [optional] user-defined function context 656 657 Level: intermediate 658 659 Notes: 660 The first Hessian function and the working array are required. 661 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 662 $ Vl_n^T*G_UP*Vr 663 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 664 Each entry of G_UP corresponds to the derivative 665 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 666 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 667 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 668 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
669 670 .seealso: 671 @*/ 672 PetscErrorCode TSSetRHSHessianProduct(TS ts,Vec *rhshp1,PetscErrorCode (*rhshessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 673 Vec *rhshp2,PetscErrorCode (*rhshessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 674 Vec *rhshp3,PetscErrorCode (*rhshessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 675 Vec *rhshp4,PetscErrorCode (*rhshessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 676 void *ctx) 677 { 678 PetscFunctionBegin; 679 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 680 PetscValidPointer(rhshp1,2); 681 682 ts->rhshessianproductctx = ctx; 683 if (rhshp1) ts->vecs_guu = rhshp1; 684 if (rhshp2) ts->vecs_gup = rhshp2; 685 if (rhshp3) ts->vecs_gpu = rhshp3; 686 if (rhshp4) ts->vecs_gpp = rhshp4; 687 ts->rhshessianproduct_guu = rhshessianproductfunc1; 688 ts->rhshessianproduct_gup = rhshessianproductfunc2; 689 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 690 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 691 PetscFunctionReturn(0); 692 } 693 694 /*@C 695 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 696 697 Collective on TS 698 699 Input Parameters: 700 . ts - The TS context obtained from TSCreate() 701 702 Notes: 703 TSComputeRHSHessianProductFunctionUU() is typically used for sensitivity implementation, 704 so most users would not generally call this routine themselves. 
705 706 Level: developer 707 708 .seealso: TSSetRHSHessianProduct() 709 @*/ 710 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 711 { 712 PetscErrorCode ierr; 713 714 PetscFunctionBegin; 715 if (!VHV) PetscFunctionReturn(0); 716 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 717 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 718 719 PetscStackPush("TS user RHSHessianProduct function 1 for sensitivity analysis"); 720 ierr = (*ts->rhshessianproduct_guu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 721 PetscStackPop; 722 PetscFunctionReturn(0); 723 } 724 725 /*@C 726 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 727 728 Collective on TS 729 730 Input Parameters: 731 . ts - The TS context obtained from TSCreate() 732 733 Notes: 734 TSComputeRHSHessianProductFunctionUP() is typically used for sensitivity implementation, 735 so most users would not generally call this routine themselves. 736 737 Level: developer 738 739 .seealso: TSSetRHSHessianProduct() 740 @*/ 741 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 742 { 743 PetscErrorCode ierr; 744 745 PetscFunctionBegin; 746 if (!VHV) PetscFunctionReturn(0); 747 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 748 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 749 750 PetscStackPush("TS user RHSHessianProduct function 2 for sensitivity analysis"); 751 ierr = (*ts->rhshessianproduct_gup)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 752 PetscStackPop; 753 PetscFunctionReturn(0); 754 } 755 756 /*@C 757 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 758 759 Collective on TS 760 761 Input Parameters: 762 . 
ts - The TS context obtained from TSCreate() 763 764 Notes: 765 TSComputeRHSHessianProductFunctionPU() is typically used for sensitivity implementation, 766 so most users would not generally call this routine themselves. 767 768 Level: developer 769 770 .seealso: TSSetRHSHessianProduct() 771 @*/ 772 PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 773 { 774 PetscErrorCode ierr; 775 776 PetscFunctionBegin; 777 if (!VHV) PetscFunctionReturn(0); 778 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 779 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 780 781 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 782 ierr = (*ts->rhshessianproduct_gpu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 783 PetscStackPop; 784 PetscFunctionReturn(0); 785 } 786 787 /*@C 788 TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp. 789 790 Collective on TS 791 792 Input Parameters: 793 . ts - The TS context obtained from TSCreate() 794 795 Notes: 796 TSComputeRHSHessianProductFunctionPP() is typically used for sensitivity implementation, 797 so most users would not generally call this routine themselves. 
798 799 Level: developer 800 801 .seealso: TSSetRHSHessianProduct() 802 @*/ 803 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 804 { 805 PetscErrorCode ierr; 806 807 PetscFunctionBegin; 808 if (!VHV) PetscFunctionReturn(0); 809 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 810 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 811 812 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 813 ierr = (*ts->rhshessianproduct_gpp)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 814 PetscStackPop; 815 PetscFunctionReturn(0); 816 } 817 818 /* --------------------------- Adjoint sensitivity ---------------------------*/ 819 820 /*@ 821 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 822 for use by the TSAdjoint routines. 823 824 Logically Collective on TS 825 826 Input Parameters: 827 + ts - the TS context obtained from TSCreate() 828 . numcost - number of gradients to be computed, this is the number of cost functions 829 . 
lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 830 - mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 831 832 Level: beginner 833 834 Notes: 835 the entries in these vectors must be correctly initialized with the values lamda_i = df/dy|finaltime mu_i = df/dp|finaltime 836 837 After TSAdjointSolve() is called the lamba and the mu contain the computed sensitivities 838 839 .seealso TSGetCostGradients() 840 @*/ 841 PetscErrorCode TSSetCostGradients(TS ts,PetscInt numcost,Vec *lambda,Vec *mu) 842 { 843 PetscFunctionBegin; 844 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 845 PetscValidPointer(lambda,3); 846 ts->vecs_sensi = lambda; 847 ts->vecs_sensip = mu; 848 if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 849 ts->numcost = numcost; 850 PetscFunctionReturn(0); 851 } 852 853 /*@ 854 TSGetCostGradients - Returns the gradients from the TSAdjointSolve() 855 856 Not Collective, but Vec returned is parallel if TS is parallel 857 858 Input Parameter: 859 . ts - the TS context obtained from TSCreate() 860 861 Output Parameters: 862 + numcost - size of returned arrays 863 . 
lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 864 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 865 866 Level: intermediate 867 868 .seealso: TSSetCostGradients() 869 @*/ 870 PetscErrorCode TSGetCostGradients(TS ts,PetscInt *numcost,Vec **lambda,Vec **mu) 871 { 872 PetscFunctionBegin; 873 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 874 if (numcost) *numcost = ts->numcost; 875 if (lambda) *lambda = ts->vecs_sensi; 876 if (mu) *mu = ts->vecs_sensip; 877 PetscFunctionReturn(0); 878 } 879 880 /*@ 881 TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters 882 for use by the TSAdjoint routines. 883 884 Logically Collective on TS 885 886 Input Parameters: 887 + ts - the TS context obtained from TSCreate() 888 . numcost - number of cost functions 889 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 890 . mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 891 - dir - the direction vector that are multiplied with the Hessian of the cost functions 892 893 Level: beginner 894 895 Notes: Hessian of the cost function is completely different from Hessian of the ODE/DAE system 896 897 For second-order adjoint, one needs to call this function and then TSAdjointSetForward() before TSSolve(). 898 899 After TSAdjointSolve() is called, the lamba2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix). Users must provide a direction vector; it is usually generated by an optimization solver. 900 901 Passing NULL for lambda2 disables the second-order calculation. 
902 .seealso: TSAdjointSetForward() 903 @*/ 904 PetscErrorCode TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec *lambda2,Vec *mu2,Vec dir) 905 { 906 PetscFunctionBegin; 907 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 908 if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 909 ts->numcost = numcost; 910 ts->vecs_sensi2 = lambda2; 911 ts->vecs_sensi2p = mu2; 912 ts->vec_dir = dir; 913 PetscFunctionReturn(0); 914 } 915 916 /*@ 917 TSGetCostHessianProducts - Returns the gradients from the TSAdjointSolve() 918 919 Not Collective, but Vec returned is parallel if TS is parallel 920 921 Input Parameter: 922 . ts - the TS context obtained from TSCreate() 923 924 Output Parameters: 925 + numcost - number of cost functions 926 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 927 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 928 - dir - the direction vector that are multiplied with the Hessian of the cost functions 929 930 Level: intermediate 931 932 .seealso: TSSetCostHessianProducts() 933 @*/ 934 PetscErrorCode TSGetCostHessianProducts(TS ts,PetscInt *numcost,Vec **lambda2,Vec **mu2, Vec *dir) 935 { 936 PetscFunctionBegin; 937 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 938 if (numcost) *numcost = ts->numcost; 939 if (lambda2) *lambda2 = ts->vecs_sensi2; 940 if (mu2) *mu2 = ts->vecs_sensi2p; 941 if (dir) *dir = ts->vec_dir; 942 PetscFunctionReturn(0); 943 } 944 945 /*@ 946 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 947 948 Logically Collective on TS 949 950 Input Parameters: 951 + ts - the TS context obtained from TSCreate() 952 - didp - the derivative of initial values w.r.t. parameters 953 954 Level: intermediate 955 956 Notes: When computing sensitivies w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. TSAdjoint does not reset the tangent linear solver automatically, TSAdjointResetForward() should be called to reset the tangent linear solver. 957 958 .seealso: TSSetCostHessianProducts(), TSAdjointResetForward() 959 @*/ 960 PetscErrorCode TSAdjointSetForward(TS ts,Mat didp) 961 { 962 Mat A; 963 Vec sp; 964 PetscScalar *xarr; 965 PetscInt lsize; 966 PetscErrorCode ierr; 967 968 PetscFunctionBegin; 969 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 970 if (!ts->vecs_sensi2) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Must call TSSetCostHessianProducts() first"); 971 if (!ts->vec_dir) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 972 /* create a single-column dense matrix */ 973 ierr = VecGetLocalSize(ts->vec_sol,&lsize);CHKERRQ(ierr); 974 ierr = MatCreateDense(PetscObjectComm((PetscObject)ts),lsize,PETSC_DECIDE,PETSC_DECIDE,1,NULL,&A);CHKERRQ(ierr); 975 976 ierr = VecDuplicate(ts->vec_sol,&sp);CHKERRQ(ierr); 977 ierr = MatDenseGetColumn(A,0,&xarr);CHKERRQ(ierr); 978 ierr = VecPlaceArray(sp,xarr);CHKERRQ(ierr); 979 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 980 if (didp) { 981 ierr = MatMult(didp,ts->vec_dir,sp);CHKERRQ(ierr); 982 ierr = VecScale(sp,2.);CHKERRQ(ierr); 983 } else { 984 ierr = VecZeroEntries(sp);CHKERRQ(ierr); 985 } 986 } else { /* tangent linear variable initialized as dir */ 987 ierr = VecCopy(ts->vec_dir,sp);CHKERRQ(ierr); 988 } 989 ierr = VecResetArray(sp);CHKERRQ(ierr); 990 ierr = MatDenseRestoreColumn(A,&xarr);CHKERRQ(ierr); 991 ierr = VecDestroy(&sp);CHKERRQ(ierr); 992 993 ierr = TSForwardSetInitialSensitivities(ts,A);CHKERRQ(ierr); /* if didp is NULL, identity matrix is assumed */ 994 995 ierr = MatDestroy(&A);CHKERRQ(ierr); 996 PetscFunctionReturn(0); 997 } 998 999 /*@ 1000 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 1001 1002 Logically Collective on TS 1003 1004 Input Parameters: 1005 . ts - the TS context obtained from TSCreate() 1006 1007 Level: intermediate 1008 1009 .seealso: TSAdjointSetForward() 1010 @*/ 1011 PetscErrorCode TSAdjointResetForward(TS ts) 1012 { 1013 PetscErrorCode ierr; 1014 1015 PetscFunctionBegin; 1016 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 1017 ierr = TSForwardReset(ts);CHKERRQ(ierr); 1018 PetscFunctionReturn(0); 1019 } 1020 1021 /*@ 1022 TSAdjointSetUp - Sets up the internal data structures for the later use 1023 of an adjoint solver 1024 1025 Collective on TS 1026 1027 Input Parameter: 1028 . 
ts - the TS context obtained from TSCreate()

   Level: advanced

.seealso: TSCreate(), TSAdjointStep(), TSSetCostGradients()
@*/
PetscErrorCode TSAdjointSetUp(TS ts)
{
  TSTrajectory   tj;
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->adjointsetupcalled) PetscFunctionReturn(0);
  /* the adjoint solve requires cost gradients and, if parameter sensitivities are requested, a JacobianP */
  if (!ts->vecs_sensi) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetCostGradients() first");
  if (ts->vecs_sensip && !ts->Jacp && !ts->Jacprhs) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetRHSJacobianP() or TSSetIJacobianP() first");
  ierr = TSGetTrajectory(ts,&tj);CHKERRQ(ierr);
  /* the Basic trajectory type must store stage values, not just solutions, for the adjoint recomputation */
  ierr = PetscObjectTypeCompare((PetscObject)tj,TSTRAJECTORYBASIC,&match);CHKERRQ(ierr);
  if (match) {
    PetscBool solution_only;
    ierr = TSTrajectoryGetSolutionOnly(tj,&solution_only);CHKERRQ(ierr);
    if (solution_only) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0");
  }
  ierr = TSTrajectorySetUseHistory(tj,PETSC_FALSE);CHKERRQ(ierr); /* not use TSHistory */

  if (ts->quadraturets) { /* if there is integral in the cost function */
    /* work vectors holding one column of dR/du (and dR/dp) during the adjoint accumulation */
    ierr = VecDuplicate(ts->vecs_sensi[0],&ts->vec_drdu_col);CHKERRQ(ierr);
    if (ts->vecs_sensip) {
      ierr = VecDuplicate(ts->vecs_sensip[0],&ts->vec_drdp_col);CHKERRQ(ierr);
    }
  }

  /* type-specific setup (e.g. Runge-Kutta adjoint work arrays) */
  if (ts->ops->adjointsetup) {
    ierr = (*ts->ops->adjointsetup)(ts);CHKERRQ(ierr);
  }
  ts->adjointsetupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointReset - Resets a TSAdjoint context and removes any allocated Vecs and Mats.

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: beginner

.seealso: TSCreate(), TSAdjointSetUp(), TSDestroy()
@*/
PetscErrorCode TSAdjointReset(TS ts)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  if (ts->ops->adjointreset) {
    ierr = (*ts->ops->adjointreset)(ts);CHKERRQ(ierr);
  }
  if (ts->quadraturets) { /* if there is integral in the cost function */
    ierr = VecDestroy(&ts->vec_drdu_col);CHKERRQ(ierr);
    if (ts->vecs_sensip) {
      ierr = VecDestroy(&ts->vec_drdp_col);CHKERRQ(ierr);
    }
  }
  /* the sensitivity vectors are user-owned (set by TSSetCostGradients()/TSSetCostHessianProducts()); drop the references only */
  ts->vecs_sensi = NULL;
  ts->vecs_sensip = NULL;
  ts->vecs_sensi2 = NULL;
  ts->vecs_sensi2p = NULL;
  ts->vec_dir = NULL;
  ts->adjointsetupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
-  steps - number of steps to use

   Level: intermediate

   Notes:
    Normally one does not call this and TSAdjointSolve() integrates back to the original timestep. One can call this
    so as to integrate back to less than the original timestep

.seealso: TSSetExactFinalTime()
@*/
PetscErrorCode TSAdjointSetSteps(TS ts,PetscInt steps)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidLogicalCollectiveInt(ts,steps,2);
  if (steps < 0) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back a negative number of steps");
  /* cannot rewind past the number of forward steps actually taken (trajectory has no data beyond that) */
  if (steps > ts->steps) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back more than the total number of forward steps");
  ts->adjoint_max_steps = steps;
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointSetRHSJacobian - Deprecated, use TSSetRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointSetRHSJacobian(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts, TS_CLASSID,1);
  PetscValidHeaderSpecific(Amat,MAT_CLASSID,2);

  ts->rhsjacobianp = func;
  ts->rhsjacobianpctx = ctx;
  if (Amat) {
    ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);
    /* NOTE(review): this deprecated path stores the matrix in ts->Jacp, whereas TSSetRHSJacobianP() stores it in ts->Jacprhs -- confirm the divergence is intentional */
    ierr = MatDestroy(&ts->Jacp);CHKERRQ(ierr);
    ts->Jacp = Amat;
  }
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointComputeRHSJacobian - Deprecated, use TSComputeRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);
  PetscValidPointer(Amat,4);

  /* invoke the user-supplied JacobianP callback registered via TSSetRHSJacobianP()/TSAdjointSetRHSJacobian() */
  PetscStackPush("TS user JacobianP function for sensitivity analysis");
  ierr = (*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointComputeDRDYFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec *DRDU)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* invoke the user-supplied derivative of the cost integrand w.r.t. the state */
  PetscStackPush("TS user DRDY function for sensitivity analysis");
  ierr = (*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@
   TSAdjointComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP()

   Level: deprecated

@*/
PetscErrorCode TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* invoke the user-supplied derivative of the cost integrand w.r.t. the parameters */
  PetscStackPush("TS user DRDP function for sensitivity analysis");
  ierr = (*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSensi - monitors the first lambda sensitivity

   Level: intermediate

.seealso: TSAdjointMonitorSet()
@*/
PetscErrorCode TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf)
{
  PetscErrorCode ierr;
  PetscViewer    viewer = vf->viewer;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8);
  ierr = PetscViewerPushFormat(viewer,vf->format);CHKERRQ(ierr);
  /* only the first cost function's gradient w.r.t. initial conditions is displayed */
  ierr = VecView(lambda[0],viewer);CHKERRQ(ierr);
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate
for the type indicated by the user 1239 1240 Collective on TS 1241 1242 Input Parameters: 1243 + ts - TS object you wish to monitor 1244 . name - the monitor type one is seeking 1245 . help - message indicating what monitoring is done 1246 . manual - manual page for the monitor 1247 . monitor - the monitor function 1248 - monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the TS or PetscViewer objects 1249 1250 Level: developer 1251 1252 .seealso: PetscOptionsGetViewer(), PetscOptionsGetReal(), PetscOptionsHasName(), PetscOptionsGetString(), 1253 PetscOptionsGetIntArray(), PetscOptionsGetRealArray(), PetscOptionsBool() 1254 PetscOptionsInt(), PetscOptionsString(), PetscOptionsReal(), PetscOptionsBool(), 1255 PetscOptionsName(), PetscOptionsBegin(), PetscOptionsEnd(), PetscOptionsHead(), 1256 PetscOptionsStringArray(),PetscOptionsRealArray(), PetscOptionsScalar(), 1257 PetscOptionsBoolGroupBegin(), PetscOptionsBoolGroup(), PetscOptionsBoolGroupEnd(), 1258 PetscOptionsFList(), PetscOptionsEList() 1259 @*/ 1260 PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts,const char name[],const char help[], const char manual[],PetscErrorCode (*monitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,PetscViewerAndFormat*),PetscErrorCode (*monitorsetup)(TS,PetscViewerAndFormat*)) 1261 { 1262 PetscErrorCode ierr; 1263 PetscViewer viewer; 1264 PetscViewerFormat format; 1265 PetscBool flg; 1266 1267 PetscFunctionBegin; 1268 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts),((PetscObject) ts)->options,((PetscObject)ts)->prefix,name,&viewer,&format,&flg);CHKERRQ(ierr); 1269 if (flg) { 1270 PetscViewerAndFormat *vf; 1271 ierr = PetscViewerAndFormatCreate(viewer,format,&vf);CHKERRQ(ierr); 1272 ierr = PetscObjectDereference((PetscObject)viewer);CHKERRQ(ierr); 1273 if (monitorsetup) { 1274 ierr = (*monitorsetup)(ts,vf);CHKERRQ(ierr); 1275 } 1276 ierr = TSAdjointMonitorSet(ts,(PetscErrorCode 
(*)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy);CHKERRQ(ierr); 1277 } 1278 PetscFunctionReturn(0); 1279 } 1280 1281 /*@C 1282 TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every 1283 timestep to display the iteration's progress. 1284 1285 Logically Collective on TS 1286 1287 Input Parameters: 1288 + ts - the TS context obtained from TSCreate() 1289 . adjointmonitor - monitoring routine 1290 . adjointmctx - [optional] user-defined context for private data for the 1291 monitor routine (use NULL if no context is desired) 1292 - adjointmonitordestroy - [optional] routine that frees monitor context 1293 (may be NULL) 1294 1295 Calling sequence of monitor: 1296 $ int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx) 1297 1298 + ts - the TS context 1299 . steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have 1300 been interpolated to) 1301 . time - current time 1302 . u - current iterate 1303 . numcost - number of cost functionos 1304 . lambda - sensitivities to initial conditions 1305 . mu - sensitivities to parameters 1306 - adjointmctx - [optional] adjoint monitoring context 1307 1308 Notes: 1309 This routine adds an additional monitor to the list of monitors that 1310 already has been loaded. 
1311 1312 Fortran Notes: 1313 Only a single monitor function can be set for each TS object 1314 1315 Level: intermediate 1316 1317 .seealso: TSAdjointMonitorCancel() 1318 @*/ 1319 PetscErrorCode TSAdjointMonitorSet(TS ts,PetscErrorCode (*adjointmonitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*),void *adjointmctx,PetscErrorCode (*adjointmdestroy)(void**)) 1320 { 1321 PetscErrorCode ierr; 1322 PetscInt i; 1323 PetscBool identical; 1324 1325 PetscFunctionBegin; 1326 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1327 for (i=0; i<ts->numbermonitors;i++) { 1328 ierr = PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor,adjointmctx,adjointmdestroy,(PetscErrorCode (*)(void))ts->adjointmonitor[i],ts->adjointmonitorcontext[i],ts->adjointmonitordestroy[i],&identical);CHKERRQ(ierr); 1329 if (identical) PetscFunctionReturn(0); 1330 } 1331 if (ts->numberadjointmonitors >= MAXTSMONITORS) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many adjoint monitors set"); 1332 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1333 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1334 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void*)adjointmctx; 1335 PetscFunctionReturn(0); 1336 } 1337 1338 /*@C 1339 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1340 1341 Logically Collective on TS 1342 1343 Input Parameters: 1344 . ts - the TS context obtained from TSCreate() 1345 1346 Notes: 1347 There is no way to remove a single, specific monitor. 
1348 1349 Level: intermediate 1350 1351 .seealso: TSAdjointMonitorSet() 1352 @*/ 1353 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1354 { 1355 PetscErrorCode ierr; 1356 PetscInt i; 1357 1358 PetscFunctionBegin; 1359 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1360 for (i=0; i<ts->numberadjointmonitors; i++) { 1361 if (ts->adjointmonitordestroy[i]) { 1362 ierr = (*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]);CHKERRQ(ierr); 1363 } 1364 } 1365 ts->numberadjointmonitors = 0; 1366 PetscFunctionReturn(0); 1367 } 1368 1369 /*@C 1370 TSAdjointMonitorDefault - the default monitor of adjoint computations 1371 1372 Level: intermediate 1373 1374 .seealso: TSAdjointMonitorSet() 1375 @*/ 1376 PetscErrorCode TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf) 1377 { 1378 PetscErrorCode ierr; 1379 PetscViewer viewer = vf->viewer; 1380 1381 PetscFunctionBegin; 1382 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,8); 1383 ierr = PetscViewerPushFormat(viewer,vf->format);CHKERRQ(ierr); 1384 ierr = PetscViewerASCIIAddTab(viewer,((PetscObject)ts)->tablevel);CHKERRQ(ierr); 1385 ierr = PetscViewerASCIIPrintf(viewer,"%D TS dt %g time %g%s",step,(double)ts->time_step,(double)ptime,ts->steprollback ? " (r)\n" : "\n");CHKERRQ(ierr); 1386 ierr = PetscViewerASCIISubtractTab(viewer,((PetscObject)ts)->tablevel);CHKERRQ(ierr); 1387 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1388 PetscFunctionReturn(0); 1389 } 1390 1391 /*@C 1392 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint TS solvers by calling 1393 VecView() for the sensitivities to initial states at each timestep 1394 1395 Collective on TS 1396 1397 Input Parameters: 1398 + ts - the TS context 1399 . step - current time-step 1400 . ptime - current time 1401 . u - current state 1402 . numcost - number of cost functions 1403 . lambda - sensitivities to initial conditions 1404 . 
mu - sensitivities to parameters
-  dummy - either a viewer or NULL

   Level: intermediate

.seealso: TSAdjointMonitorSet(), TSAdjointMonitorDefault(), VecView()
@*/
PetscErrorCode TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda,Vec *mu,void *dummy)
{
  PetscErrorCode   ierr;
  TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy;
  PetscDraw        draw;
  PetscReal        xl,yl,xr,yr,h;
  char             time[32];

  PetscFunctionBegin;
  /* draw only every howoften-th step; howoften == -1 means draw only once a converged/diverged reason has been set */
  if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0);

  /* plot the first cost function's sensitivity to initial conditions, then overlay a timestamp label */
  ierr = VecView(lambda[0],ictx->viewer);CHKERRQ(ierr);
  ierr = PetscViewerDrawGetDraw(ictx->viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscSNPrintf(time,32,"Timestep %d Time %g",(int)step,(double)ptime);CHKERRQ(ierr);
  ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr);
  h = yl + .95*(yr - yl); /* place the label near the top of the drawing area */
  ierr = PetscDrawStringCentered(draw,.5*(xl+xr),h,PETSC_DRAW_BLACK,time);CHKERRQ(ierr);
  ierr = PetscDrawFlush(draw);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   TSAdjointSetFromOptions - Sets various TSAdjoint parameters from user options.

   Collective on TSAdjoint

   Input Parameter:
.  ts - the TS context

   Options Database Keys:
+  -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory)
.  -ts_adjoint_monitor - print information at each adjoint time step
-  -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically

   Level: developer

   Notes:
   This is not normally called directly by users

.seealso: TSSetSaveTrajectory(), TSTrajectorySetUp()
*/
PetscErrorCode TSAdjointSetFromOptions(PetscOptionItems *PetscOptionsObject,TS ts)
{
  PetscBool      tflg,opt;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,2);
  ierr = PetscOptionsHead(PetscOptionsObject,"TS Adjoint options");CHKERRQ(ierr);
  tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE;
  ierr = PetscOptionsBool("-ts_adjoint_solve","Solve the adjoint problem immediately after solving the forward problem","",tflg,&tflg,&opt);CHKERRQ(ierr);
  if (opt) {
    /* an automatic adjoint solve requires the forward trajectory to be saved */
    ierr = TSSetSaveTrajectory(ts);CHKERRQ(ierr);
    ts->adjoint_solve = tflg;
  }
  ierr = TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor","Monitor adjoint timestep size","TSAdjointMonitorDefault",TSAdjointMonitorDefault,NULL);CHKERRQ(ierr);
  ierr = TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor_sensi","Monitor sensitivity in the adjoint computation","TSAdjointMonitorSensi",TSAdjointMonitorSensi,NULL);CHKERRQ(ierr);
  opt = PETSC_FALSE;
  ierr = PetscOptionsName("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",&opt);CHKERRQ(ierr);
  if (opt) {
    TSMonitorDrawCtx ctx;
    PetscInt         howoften = 1;

    ierr = PetscOptionsInt("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",howoften,&howoften,NULL);CHKERRQ(ierr);
    ierr = TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts),NULL,NULL,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&ctx);CHKERRQ(ierr);
    ierr = TSAdjointMonitorSet(ts,TSAdjointMonitorDrawSensi,ctx,(PetscErrorCode (*)(void**))TSMonitorDrawCtxDestroy);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   TSAdjointStep - Steps one time step backward in the adjoint run

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Level: intermediate

.seealso: TSAdjointSetUp(), TSAdjointSolve()
@*/
PetscErrorCode TSAdjointStep(TS ts)
{
  DM             dm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* NOTE(review): dm is fetched but not used below; presumably TSGetDM() is kept for its lazy-creation side effect -- confirm before removing */
  ierr = TSGetDM(ts,&dm);CHKERRQ(ierr);
  ierr = TSAdjointSetUp(ts);CHKERRQ(ierr);
  ts->steps--; /* must decrease the step index before the adjoint step is taken. */

  ts->reason = TS_CONVERGED_ITERATING;
  ts->ptime_prev = ts->ptime;
  if (!ts->ops->adjointstep) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSStep has failed because the adjoint of %s has not been implemented, try other time stepping methods for adjoint sensitivity analysis",((PetscObject)ts)->type_name);
  ierr = PetscLogEventBegin(TS_AdjointStep,ts,0,0,0);CHKERRQ(ierr);
  ierr = (*ts->ops->adjointstep)(ts);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(TS_AdjointStep,ts,0,0,0);CHKERRQ(ierr);
  ts->adjoint_steps++;

  if (ts->reason < 0) {
    if (ts->errorifstepfailed) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSAdjointStep has failed due to %s",TSConvergedReasons[ts->reason]);
  } else if (!ts->reason) {
    /* stop once the requested number of backward steps has been taken */
    if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS;
  }
  PetscFunctionReturn(0);
}

/*@
   TSAdjointSolve - Solves the discrete adjoint problem for an ODE/DAE

   Collective on TS

   Input Parameter:
.  ts - the TS context obtained from TSCreate()

   Options Database:
.
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1530 1531 Level: intermediate 1532 1533 Notes: 1534 This must be called after a call to TSSolve() that solves the forward problem 1535 1536 By default this will integrate back to the initial time, one can use TSAdjointSetSteps() to step back to a later time 1537 1538 .seealso: TSCreate(), TSSetCostGradients(), TSSetSolution(), TSAdjointStep() 1539 @*/ 1540 PetscErrorCode TSAdjointSolve(TS ts) 1541 { 1542 static PetscBool cite = PETSC_FALSE; 1543 #if defined(TSADJOINT_STAGE) 1544 PetscLogStage adjoint_stage; 1545 #endif 1546 PetscErrorCode ierr; 1547 1548 PetscFunctionBegin; 1549 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1550 ierr = PetscCitationsRegister("@article{tsadjointpaper,\n" 1551 " title = {{PETSc TSAdjoint: a discrete adjoint ODE solver for first-order and second-order sensitivity analysis}},\n" 1552 " author = {Zhang, Hong and Constantinescu, Emil M. and Smith, Barry F.},\n" 1553 " journal = {arXiv e-preprints},\n" 1554 " eprint = {1912.07696},\n" 1555 " archivePrefix = {arXiv},\n" 1556 " year = {2019}\n}\n",&cite);CHKERRQ(ierr); 1557 #if defined(TSADJOINT_STAGE) 1558 ierr = PetscLogStageRegister("TSAdjoint",&adjoint_stage);CHKERRQ(ierr); 1559 ierr = PetscLogStagePush(adjoint_stage);CHKERRQ(ierr); 1560 #endif 1561 ierr = TSAdjointSetUp(ts);CHKERRQ(ierr); 1562 1563 /* reset time step and iteration counters */ 1564 ts->adjoint_steps = 0; 1565 ts->ksp_its = 0; 1566 ts->snes_its = 0; 1567 ts->num_snes_failures = 0; 1568 ts->reject = 0; 1569 ts->reason = TS_CONVERGED_ITERATING; 1570 1571 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1572 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1573 1574 while (!ts->reason) { 1575 ierr = TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime);CHKERRQ(ierr); 1576 ierr = 
TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip);CHKERRQ(ierr); 1577 ierr = TSAdjointEventHandler(ts);CHKERRQ(ierr); 1578 ierr = TSAdjointStep(ts);CHKERRQ(ierr); 1579 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) { 1580 ierr = TSAdjointCostIntegral(ts);CHKERRQ(ierr); 1581 } 1582 } 1583 if (!ts->steps) { 1584 ierr = TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime);CHKERRQ(ierr); 1585 ierr = TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip);CHKERRQ(ierr); 1586 } 1587 ts->solvetime = ts->ptime; 1588 ierr = TSTrajectoryViewFromOptions(ts->trajectory,NULL,"-ts_trajectory_view");CHKERRQ(ierr); 1589 ierr = VecViewFromOptions(ts->vecs_sensi[0],(PetscObject) ts, "-ts_adjoint_view_solution");CHKERRQ(ierr); 1590 ts->adjoint_max_steps = 0; 1591 #if defined(TSADJOINT_STAGE) 1592 ierr = PetscLogStagePop();CHKERRQ(ierr); 1593 #endif 1594 PetscFunctionReturn(0); 1595 } 1596 1597 /*@C 1598 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using TSAdjointMonitorSet() 1599 1600 Collective on TS 1601 1602 Input Parameters: 1603 + ts - time stepping context obtained from TSCreate() 1604 . step - step number that has just completed 1605 . ptime - model time of the state 1606 . u - state at the current model time 1607 . numcost - number of cost functions (dimension of lambda or mu) 1608 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1609 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1610 1611 Notes: 1612 TSAdjointMonitor() is typically used automatically within the time stepping implementations. 1613 Users would almost never call this routine directly. 
1614 1615 Level: developer 1616 1617 @*/ 1618 PetscErrorCode TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda, Vec *mu) 1619 { 1620 PetscErrorCode ierr; 1621 PetscInt i,n = ts->numberadjointmonitors; 1622 1623 PetscFunctionBegin; 1624 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1625 PetscValidHeaderSpecific(u,VEC_CLASSID,4); 1626 ierr = VecLockReadPush(u);CHKERRQ(ierr); 1627 for (i=0; i<n; i++) { 1628 ierr = (*ts->adjointmonitor[i])(ts,step,ptime,u,numcost,lambda,mu,ts->adjointmonitorcontext[i]);CHKERRQ(ierr); 1629 } 1630 ierr = VecLockReadPop(u);CHKERRQ(ierr); 1631 PetscFunctionReturn(0); 1632 } 1633 1634 /*@ 1635 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 1636 1637 Collective on TS 1638 1639 Input Parameter: 1640 . ts - time stepping context 1641 1642 Level: advanced 1643 1644 Notes: 1645 This function cannot be called until TSAdjointStep() has been completed. 1646 1647 .seealso: TSAdjointSolve(), TSAdjointStep 1648 @*/ 1649 PetscErrorCode TSAdjointCostIntegral(TS ts) 1650 { 1651 PetscFunctionBegin; 1652 PetscErrorCode ierr; 1653 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1654 if (!ts->ops->adjointintegral) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the adjoint run",((PetscObject)ts)->type_name); 1655 ierr = (*ts->ops->adjointintegral)(ts);CHKERRQ(ierr); 1656 PetscFunctionReturn(0); 1657 } 1658 1659 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1660 1661 /*@ 1662 TSForwardSetUp - Sets up the internal data structures for the later use 1663 of forward sensitivity analysis 1664 1665 Collective on TS 1666 1667 Input Parameter: 1668 . 
ts - the TS context obtained from TSCreate() 1669 1670 Level: advanced 1671 1672 .seealso: TSCreate(), TSDestroy(), TSSetUp() 1673 @*/ 1674 PetscErrorCode TSForwardSetUp(TS ts) 1675 { 1676 PetscErrorCode ierr; 1677 1678 PetscFunctionBegin; 1679 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1680 if (ts->forwardsetupcalled) PetscFunctionReturn(0); 1681 if (ts->ops->forwardsetup) { 1682 ierr = (*ts->ops->forwardsetup)(ts);CHKERRQ(ierr); 1683 } 1684 ierr = VecDuplicate(ts->vec_sol,&ts->vec_sensip_col);CHKERRQ(ierr); 1685 ts->forwardsetupcalled = PETSC_TRUE; 1686 PetscFunctionReturn(0); 1687 } 1688 1689 /*@ 1690 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1691 1692 Collective on TS 1693 1694 Input Parameter: 1695 . ts - the TS context obtained from TSCreate() 1696 1697 Level: advanced 1698 1699 .seealso: TSCreate(), TSDestroy(), TSForwardSetUp() 1700 @*/ 1701 PetscErrorCode TSForwardReset(TS ts) 1702 { 1703 TS quadts = ts->quadraturets; 1704 PetscErrorCode ierr; 1705 1706 PetscFunctionBegin; 1707 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1708 if (ts->ops->forwardreset) { 1709 ierr = (*ts->ops->forwardreset)(ts);CHKERRQ(ierr); 1710 } 1711 ierr = MatDestroy(&ts->mat_sensip);CHKERRQ(ierr); 1712 if (quadts) { 1713 ierr = MatDestroy(&quadts->mat_sensip);CHKERRQ(ierr); 1714 } 1715 ierr = VecDestroy(&ts->vec_sensip_col);CHKERRQ(ierr); 1716 ts->forward_solve = PETSC_FALSE; 1717 ts->forwardsetupcalled = PETSC_FALSE; 1718 PetscFunctionReturn(0); 1719 } 1720 1721 /*@ 1722 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1723 1724 Input Parameters: 1725 + ts- the TS context obtained from TSCreate() 1726 . numfwdint- number of integrals 1727 - vp = the vectors containing the gradients for each integral w.r.t. 
parameters 1728 1729 Level: deprecated 1730 1731 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1732 @*/ 1733 PetscErrorCode TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec *vp) 1734 { 1735 PetscFunctionBegin; 1736 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1737 if (ts->numcost && ts->numcost!=numfwdint) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1738 if (!ts->numcost) ts->numcost = numfwdint; 1739 1740 ts->vecs_integral_sensip = vp; 1741 PetscFunctionReturn(0); 1742 } 1743 1744 /*@ 1745 TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term. 1746 1747 Input Parameter: 1748 . ts- the TS context obtained from TSCreate() 1749 1750 Output Parameter: 1751 . vp = the vectors containing the gradients for each integral w.r.t. parameters 1752 1753 Level: deprecated 1754 1755 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1756 @*/ 1757 PetscErrorCode TSForwardGetIntegralGradients(TS ts,PetscInt *numfwdint,Vec **vp) 1758 { 1759 PetscFunctionBegin; 1760 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1761 PetscValidPointer(vp,3); 1762 if (numfwdint) *numfwdint = ts->numcost; 1763 if (vp) *vp = ts->vecs_integral_sensip; 1764 PetscFunctionReturn(0); 1765 } 1766 1767 /*@ 1768 TSForwardStep - Compute the forward sensitivity for one time step. 1769 1770 Collective on TS 1771 1772 Input Parameter: 1773 . ts - time stepping context 1774 1775 Level: advanced 1776 1777 Notes: 1778 This function cannot be called until TSStep() has been completed. 
1779 1780 .seealso: TSForwardSetSensitivities(), TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardSetUp() 1781 @*/ 1782 PetscErrorCode TSForwardStep(TS ts) 1783 { 1784 PetscErrorCode ierr; 1785 PetscFunctionBegin; 1786 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1787 if (!ts->ops->forwardstep) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide forward sensitivity analysis",((PetscObject)ts)->type_name); 1788 ierr = PetscLogEventBegin(TS_ForwardStep,ts,0,0,0);CHKERRQ(ierr); 1789 ierr = (*ts->ops->forwardstep)(ts);CHKERRQ(ierr); 1790 ierr = PetscLogEventEnd(TS_ForwardStep,ts,0,0,0);CHKERRQ(ierr); 1791 if (ts->reason < 0 && ts->errorifstepfailed) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSFowardStep has failed due to %s",TSConvergedReasons[ts->reason]); 1792 PetscFunctionReturn(0); 1793 } 1794 1795 /*@ 1796 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 1797 1798 Logically Collective on TS 1799 1800 Input Parameters: 1801 + ts - the TS context obtained from TSCreate() 1802 . nump - number of parameters 1803 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1804 1805 Level: beginner 1806 1807 Notes: 1808 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1809 This function turns on a flag to trigger TSSolve() to compute forward sensitivities automatically. 1810 You must call this function before TSSolve(). 1811 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 
1812 1813 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1814 @*/ 1815 PetscErrorCode TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat) 1816 { 1817 PetscErrorCode ierr; 1818 1819 PetscFunctionBegin; 1820 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1821 PetscValidHeaderSpecific(Smat,MAT_CLASSID,3); 1822 ts->forward_solve = PETSC_TRUE; 1823 if (nump == PETSC_DEFAULT) { 1824 ierr = MatGetSize(Smat,NULL,&ts->num_parameters);CHKERRQ(ierr); 1825 } else ts->num_parameters = nump; 1826 ierr = PetscObjectReference((PetscObject)Smat);CHKERRQ(ierr); 1827 ierr = MatDestroy(&ts->mat_sensip);CHKERRQ(ierr); 1828 ts->mat_sensip = Smat; 1829 PetscFunctionReturn(0); 1830 } 1831 1832 /*@ 1833 TSForwardGetSensitivities - Returns the trajectory sensitivities 1834 1835 Not Collective, but Vec returned is parallel if TS is parallel 1836 1837 Output Parameters: 1838 + ts - the TS context obtained from TSCreate() 1839 . nump - number of parameters 1840 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1841 1842 Level: intermediate 1843 1844 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1845 @*/ 1846 PetscErrorCode TSForwardGetSensitivities(TS ts,PetscInt *nump,Mat *Smat) 1847 { 1848 PetscFunctionBegin; 1849 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1850 if (nump) *nump = ts->num_parameters; 1851 if (Smat) *Smat = ts->mat_sensip; 1852 PetscFunctionReturn(0); 1853 } 1854 1855 /*@ 1856 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1857 1858 Collective on TS 1859 1860 Input Parameter: 1861 . ts - time stepping context 1862 1863 Level: advanced 1864 1865 Notes: 1866 This function cannot be called until TSStep() has been completed. 
1867 1868 .seealso: TSSolve(), TSAdjointCostIntegral() 1869 @*/ 1870 PetscErrorCode TSForwardCostIntegral(TS ts) 1871 { 1872 PetscErrorCode ierr; 1873 1874 PetscFunctionBegin; 1875 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1876 if (!ts->ops->forwardintegral) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the forward run",((PetscObject)ts)->type_name); 1877 ierr = (*ts->ops->forwardintegral)(ts);CHKERRQ(ierr); 1878 PetscFunctionReturn(0); 1879 } 1880 1881 /*@ 1882 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1883 1884 Collective on TS 1885 1886 Input Parameters: 1887 + ts - the TS context obtained from TSCreate() 1888 - didp - parametric sensitivities of the initial condition 1889 1890 Level: intermediate 1891 1892 Notes: TSSolve() allows users to pass the initial solution directly to TS. But the tangent linear variables cannot be initialized in this way. This function is used to set initial values for tangent linear variables. 1893 1894 .seealso: TSForwardSetSensitivities() 1895 @*/ 1896 PetscErrorCode TSForwardSetInitialSensitivities(TS ts,Mat didp) 1897 { 1898 PetscErrorCode ierr; 1899 1900 PetscFunctionBegin; 1901 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1902 PetscValidHeaderSpecific(didp,MAT_CLASSID,2); 1903 if (!ts->mat_sensip) { 1904 ierr = TSForwardSetSensitivities(ts,PETSC_DEFAULT,didp);CHKERRQ(ierr); 1905 } 1906 PetscFunctionReturn(0); 1907 } 1908 1909 /*@ 1910 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1911 1912 Input Parameter: 1913 . 
ts - the TS context obtained from TSCreate() 1914 1915 Output Parameters: 1916 + ns - number of stages 1917 - S - tangent linear sensitivities at the intermediate stages 1918 1919 Level: advanced 1920 1921 @*/ 1922 PetscErrorCode TSForwardGetStages(TS ts,PetscInt *ns,Mat **S) 1923 { 1924 PetscErrorCode ierr; 1925 1926 PetscFunctionBegin; 1927 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1928 1929 if (!ts->ops->getstages) *S=NULL; 1930 else { 1931 ierr = (*ts->ops->forwardgetstages)(ts,ns,S);CHKERRQ(ierr); 1932 } 1933 PetscFunctionReturn(0); 1934 } 1935 1936 /*@ 1937 TSCreateQuadratureTS - Create a sub-TS that evaluates integrals over time 1938 1939 Input Parameters: 1940 + ts - the TS context obtained from TSCreate() 1941 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1942 1943 Output Parameters: 1944 . quadts - the child TS context 1945 1946 Level: intermediate 1947 1948 .seealso: TSGetQuadratureTS() 1949 @*/ 1950 PetscErrorCode TSCreateQuadratureTS(TS ts,PetscBool fwd,TS *quadts) 1951 { 1952 char prefix[128]; 1953 PetscErrorCode ierr; 1954 1955 PetscFunctionBegin; 1956 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1957 PetscValidPointer(quadts,3); 1958 ierr = TSDestroy(&ts->quadraturets);CHKERRQ(ierr); 1959 ierr = TSCreate(PetscObjectComm((PetscObject)ts),&ts->quadraturets);CHKERRQ(ierr); 1960 ierr = PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets,(PetscObject)ts,1);CHKERRQ(ierr); 1961 ierr = PetscLogObjectParent((PetscObject)ts,(PetscObject)ts->quadraturets);CHKERRQ(ierr); 1962 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%squad_",((PetscObject)ts)->prefix ? 
((PetscObject)ts)->prefix : "");CHKERRQ(ierr); 1963 ierr = TSSetOptionsPrefix(ts->quadraturets,prefix);CHKERRQ(ierr); 1964 *quadts = ts->quadraturets; 1965 1966 if (ts->numcost) { 1967 ierr = VecCreateSeq(PETSC_COMM_SELF,ts->numcost,&(*quadts)->vec_sol);CHKERRQ(ierr); 1968 } else { 1969 ierr = VecCreateSeq(PETSC_COMM_SELF,1,&(*quadts)->vec_sol);CHKERRQ(ierr); 1970 } 1971 ts->costintegralfwd = fwd; 1972 PetscFunctionReturn(0); 1973 } 1974 1975 /*@ 1976 TSGetQuadratureTS - Return the sub-TS that evaluates integrals over time 1977 1978 Input Parameter: 1979 . ts - the TS context obtained from TSCreate() 1980 1981 Output Parameters: 1982 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1983 - quadts - the child TS context 1984 1985 Level: intermediate 1986 1987 .seealso: TSCreateQuadratureTS() 1988 @*/ 1989 PetscErrorCode TSGetQuadratureTS(TS ts,PetscBool *fwd,TS *quadts) 1990 { 1991 PetscFunctionBegin; 1992 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1993 if (fwd) *fwd = ts->costintegralfwd; 1994 if (quadts) *quadts = ts->quadraturets; 1995 PetscFunctionReturn(0); 1996 } 1997 1998 /*@ 1999 TSComputeSNESJacobian - Compute the SNESJacobian 2000 2001 Input Parameters: 2002 + ts - the TS context obtained from TSCreate() 2003 - x - state vector 2004 2005 Output Parameters: 2006 + J - Jacobian matrix 2007 - Jpre - preconditioning matrix for J (may be same as J) 2008 2009 Level: developer 2010 2011 Notes: 2012 Using SNES to compute the Jacobian enables finite differencing when TS Jacobian is not available. 
2013 @*/ 2014 PetscErrorCode TSComputeSNESJacobian(TS ts,Vec x,Mat J,Mat Jpre) 2015 { 2016 SNES snes = ts->snes; 2017 PetscErrorCode (*jac)(SNES,Vec,Mat,Mat,void*) = NULL; 2018 PetscErrorCode ierr; 2019 2020 PetscFunctionBegin; 2021 /* 2022 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 2023 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 2024 explicit methods. Instead, we check the Jacobian compute function directly to determin if FD 2025 coloring is used. 2026 */ 2027 ierr = SNESGetJacobian(snes,NULL,NULL,&jac,NULL);CHKERRQ(ierr); 2028 if (jac == SNESComputeJacobianDefaultColor) { 2029 Vec f; 2030 ierr = SNESSetSolution(snes,x);CHKERRQ(ierr); 2031 ierr = SNESGetFunction(snes,&f,NULL,NULL);CHKERRQ(ierr); 2032 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 2033 ierr = SNESComputeFunction(snes,x,f);CHKERRQ(ierr); 2034 } 2035 ierr = SNESComputeJacobian(snes,x,J,Jpre);CHKERRQ(ierr); 2036 PetscFunctionReturn(0); 2037 } 2038