1 #include <petsc/private/tsimpl.h> /*I "petscts.h" I*/ 2 #include <petscdraw.h> 3 4 PetscLogEvent TS_AdjointStep,TS_ForwardStep,TS_JacobianPEval; 5 6 /* #define TSADJOINT_STAGE */ 7 8 /* ------------------------ Sensitivity Context ---------------------------*/ 9 10 /*@C 11 TSSetRHSJacobianP - Sets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 12 13 Logically Collective on TS 14 15 Input Parameters: 16 + ts - TS context obtained from TSCreate() 17 . Amat - JacobianP matrix 18 . func - function 19 - ctx - [optional] user-defined function context 20 21 Calling sequence of func: 22 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 23 + t - current timestep 24 . U - input vector (current ODE solution) 25 . A - output matrix 26 - ctx - [optional] user-defined function context 27 28 Level: intermediate 29 30 Notes: 31 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 32 33 .seealso: TSGetRHSJacobianP() 34 @*/ 35 PetscErrorCode TSSetRHSJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 36 { 37 PetscErrorCode ierr; 38 39 PetscFunctionBegin; 40 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 41 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 42 43 ts->rhsjacobianp = func; 44 ts->rhsjacobianpctx = ctx; 45 if(Amat) { 46 ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr); 47 ierr = MatDestroy(&ts->Jacprhs);CHKERRQ(ierr); 48 ts->Jacprhs = Amat; 49 } 50 PetscFunctionReturn(0); 51 } 52 53 /*@C 54 TSGetRHSJacobianP - Gets the function that computes the Jacobian of G w.r.t. the parameters P where U_t = G(U,P,t), as well as the location to store the matrix. 55 56 Logically Collective on TS 57 58 Input Parameters: 59 . ts - TS context obtained from TSCreate() 60 61 Output Parameters: 62 + Amat - JacobianP matrix 63 . 
func - function 64 - ctx - [optional] user-defined function context 65 66 Calling sequence of func: 67 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 68 + t - current timestep 69 . U - input vector (current ODE solution) 70 . A - output matrix 71 - ctx - [optional] user-defined function context 72 73 Level: intermediate 74 75 Notes: 76 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 77 78 .seealso: TSSetRHSJacobianP() 79 @*/ 80 PetscErrorCode TSGetRHSJacobianP(TS ts,Mat *Amat,PetscErrorCode (**func)(TS,PetscReal,Vec,Mat,void*),void **ctx) 81 { 82 PetscFunctionBegin; 83 if (func) *func = ts->rhsjacobianp; 84 if (ctx) *ctx = ts->rhsjacobianpctx; 85 if (Amat) *Amat = ts->Jacprhs; 86 PetscFunctionReturn(0); 87 } 88 89 /*@C 90 TSComputeRHSJacobianP - Runs the user-defined JacobianP function. 91 92 Collective on TS 93 94 Input Parameters: 95 . ts - The TS context obtained from TSCreate() 96 97 Level: developer 98 99 .seealso: TSSetRHSJacobianP() 100 @*/ 101 PetscErrorCode TSComputeRHSJacobianP(TS ts,PetscReal t,Vec U,Mat Amat) 102 { 103 PetscErrorCode ierr; 104 105 PetscFunctionBegin; 106 if (!Amat) PetscFunctionReturn(0); 107 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 108 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 109 110 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 111 ierr = (*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx);CHKERRQ(ierr); 112 PetscStackPop; 113 PetscFunctionReturn(0); 114 } 115 116 /*@C 117 TSSetIJacobianP - Sets the function that computes the Jacobian of F w.r.t. the parameters P where F(Udot,U,t) = G(U,P,t), as well as the location to store the matrix. 118 119 Logically Collective on TS 120 121 Input Parameters: 122 + ts - TS context obtained from TSCreate() 123 . Amat - JacobianP matrix 124 . 
func - function 125 - ctx - [optional] user-defined function context 126 127 Calling sequence of func: 128 $ func (TS ts,PetscReal t,Vec y,Mat A,void *ctx); 129 + t - current timestep 130 . U - input vector (current ODE solution) 131 . Udot - time derivative of state vector 132 . shift - shift to apply, see note below 133 . A - output matrix 134 - ctx - [optional] user-defined function context 135 136 Level: intermediate 137 138 Notes: 139 Amat has the same number of rows and the same row parallel layout as u, Amat has the same number of columns and parallel layout as p 140 141 .seealso: 142 @*/ 143 PetscErrorCode TSSetIJacobianP(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Vec,PetscReal,Mat,void*),void *ctx) 144 { 145 PetscErrorCode ierr; 146 147 PetscFunctionBegin; 148 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 149 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 150 151 ts->ijacobianp = func; 152 ts->ijacobianpctx = ctx; 153 if(Amat) { 154 ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr); 155 ierr = MatDestroy(&ts->Jacp);CHKERRQ(ierr); 156 ts->Jacp = Amat; 157 } 158 PetscFunctionReturn(0); 159 } 160 161 /*@C 162 TSComputeIJacobianP - Runs the user-defined IJacobianP function. 163 164 Collective on TS 165 166 Input Parameters: 167 + ts - the TS context 168 . t - current timestep 169 . U - state vector 170 . Udot - time derivative of state vector 171 . shift - shift to apply, see note below 172 - imex - flag indicates if the method is IMEX so that the RHSJacobian should be kept separate 173 174 Output Parameters: 175 . 
A - Jacobian matrix 176 177 Level: developer 178 179 .seealso: TSSetIJacobianP() 180 @*/ 181 PetscErrorCode TSComputeIJacobianP(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal shift,Mat Amat,PetscBool imex) 182 { 183 PetscErrorCode ierr; 184 185 PetscFunctionBegin; 186 if (!Amat) PetscFunctionReturn(0); 187 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 188 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 189 PetscValidHeaderSpecific(Udot,VEC_CLASSID,4); 190 191 ierr = PetscLogEventBegin(TS_JacobianPEval,ts,U,Amat,0);CHKERRQ(ierr); 192 if (ts->ijacobianp) { 193 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 194 ierr = (*ts->ijacobianp)(ts,t,U,Udot,shift,Amat,ts->ijacobianpctx);CHKERRQ(ierr); 195 PetscStackPop; 196 } 197 if (imex) { 198 if (!ts->ijacobianp) { /* system was written as Udot = G(t,U) */ 199 PetscBool assembled; 200 ierr = MatZeroEntries(Amat);CHKERRQ(ierr); 201 ierr = MatAssembled(Amat,&assembled);CHKERRQ(ierr); 202 if (!assembled) { 203 ierr = MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 204 ierr = MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 205 } 206 } 207 } else { 208 if (ts->rhsjacobianp) { 209 ierr = TSComputeRHSJacobianP(ts,t,U,ts->Jacprhs);CHKERRQ(ierr); 210 } 211 if (ts->Jacprhs == Amat) { /* No IJacobian, so we only have the RHS matrix */ 212 ierr = MatScale(Amat,-1);CHKERRQ(ierr); 213 } else if (ts->Jacprhs) { /* Both IJacobian and RHSJacobian */ 214 MatStructure axpy = DIFFERENT_NONZERO_PATTERN; 215 if (!ts->ijacobianp) { /* No IJacobianp provided, but we have a separate RHS matrix */ 216 ierr = MatZeroEntries(Amat);CHKERRQ(ierr); 217 } 218 ierr = MatAXPY(Amat,-1,ts->Jacprhs,axpy);CHKERRQ(ierr); 219 } 220 } 221 ierr = PetscLogEventEnd(TS_JacobianPEval,ts,U,Amat,0);CHKERRQ(ierr); 222 PetscFunctionReturn(0); 223 } 224 225 /*@C 226 TSSetCostIntegrand - Sets the routine for evaluating the integral term in one or more cost functions 227 228 Logically Collective on TS 229 230 Input Parameters: 231 + ts - the TS 
context obtained from TSCreate() 232 . numcost - number of gradients to be computed, this is the number of cost functions 233 . costintegral - vector that stores the integral values 234 . rf - routine for evaluating the integrand function 235 . drduf - function that computes the gradients of the r's with respect to u 236 . drdpf - function that computes the gradients of the r's with respect to p, can be NULL if parametric sensitivity is not desired (mu=NULL) 237 . fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 238 - ctx - [optional] user-defined context for private data for the function evaluation routine (may be NULL) 239 240 Calling sequence of rf: 241 $ PetscErrorCode rf(TS ts,PetscReal t,Vec U,Vec F,void *ctx); 242 243 Calling sequence of drduf: 244 $ PetscErroCode drduf(TS ts,PetscReal t,Vec U,Vec *dRdU,void *ctx); 245 246 Calling sequence of drdpf: 247 $ PetscErroCode drdpf(TS ts,PetscReal t,Vec U,Vec *dRdP,void *ctx); 248 249 Level: deprecated 250 251 Notes: 252 For optimization there is usually a single cost function (numcost = 1). 
For sensitivities there may be multiple cost functions 253 254 .seealso: TSSetRHSJacobianP(), TSGetCostGradients(), TSSetCostGradients() 255 @*/ 256 PetscErrorCode TSSetCostIntegrand(TS ts,PetscInt numcost,Vec costintegral,PetscErrorCode (*rf)(TS,PetscReal,Vec,Vec,void*), 257 PetscErrorCode (*drduf)(TS,PetscReal,Vec,Vec*,void*), 258 PetscErrorCode (*drdpf)(TS,PetscReal,Vec,Vec*,void*), 259 PetscBool fwd,void *ctx) 260 { 261 PetscErrorCode ierr; 262 263 PetscFunctionBegin; 264 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 265 if (costintegral) PetscValidHeaderSpecific(costintegral,VEC_CLASSID,3); 266 if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2rd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostGradients() or TSForwardSetIntegralGradients()"); 267 if (!ts->numcost) ts->numcost=numcost; 268 269 if (costintegral) { 270 ierr = PetscObjectReference((PetscObject)costintegral);CHKERRQ(ierr); 271 ierr = VecDestroy(&ts->vec_costintegral);CHKERRQ(ierr); 272 ts->vec_costintegral = costintegral; 273 } else { 274 if (!ts->vec_costintegral) { /* Create a seq vec if user does not provide one */ 275 ierr = VecCreateSeq(PETSC_COMM_SELF,numcost,&ts->vec_costintegral);CHKERRQ(ierr); 276 } else { 277 ierr = VecSet(ts->vec_costintegral,0.0);CHKERRQ(ierr); 278 } 279 } 280 if (!ts->vec_costintegrand) { 281 ierr = VecDuplicate(ts->vec_costintegral,&ts->vec_costintegrand);CHKERRQ(ierr); 282 } else { 283 ierr = VecSet(ts->vec_costintegrand,0.0);CHKERRQ(ierr); 284 } 285 ts->costintegralfwd = fwd; /* Evaluate the cost integral in forward run if fwd is true */ 286 ts->costintegrand = rf; 287 ts->costintegrandctx = ctx; 288 ts->drdufunction = drduf; 289 ts->drdpfunction = drdpf; 290 PetscFunctionReturn(0); 291 } 292 293 /*@C 294 TSGetCostIntegral - Returns the values of the integral term in the cost functions. 295 It is valid to call the routine after a backward run. 
296 297 Not Collective 298 299 Input Parameter: 300 . ts - the TS context obtained from TSCreate() 301 302 Output Parameter: 303 . v - the vector containing the integrals for each cost function 304 305 Level: intermediate 306 307 .seealso: TSSetCostIntegrand() 308 309 @*/ 310 PetscErrorCode TSGetCostIntegral(TS ts,Vec *v) 311 { 312 TS quadts; 313 PetscErrorCode ierr; 314 315 PetscFunctionBegin; 316 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 317 PetscValidPointer(v,2); 318 ierr = TSGetQuadratureTS(ts,NULL,&quadts);CHKERRQ(ierr); 319 *v = quadts->vec_sol; 320 PetscFunctionReturn(0); 321 } 322 323 /*@C 324 TSComputeCostIntegrand - Evaluates the integral function in the cost functions. 325 326 Input Parameters: 327 + ts - the TS context 328 . t - current time 329 - U - state vector, i.e. current solution 330 331 Output Parameter: 332 . Q - vector of size numcost to hold the outputs 333 334 Notes: 335 Most users should not need to explicitly call this routine, as it 336 is used internally within the sensitivity analysis context. 
337 338 Level: deprecated 339 340 .seealso: TSSetCostIntegrand() 341 @*/ 342 PetscErrorCode TSComputeCostIntegrand(TS ts,PetscReal t,Vec U,Vec Q) 343 { 344 PetscErrorCode ierr; 345 346 PetscFunctionBegin; 347 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 348 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 349 PetscValidHeaderSpecific(Q,VEC_CLASSID,4); 350 351 ierr = PetscLogEventBegin(TS_FunctionEval,ts,U,Q,0);CHKERRQ(ierr); 352 if (ts->costintegrand) { 353 PetscStackPush("TS user integrand in the cost function"); 354 ierr = (*ts->costintegrand)(ts,t,U,Q,ts->costintegrandctx);CHKERRQ(ierr); 355 PetscStackPop; 356 } else { 357 ierr = VecZeroEntries(Q);CHKERRQ(ierr); 358 } 359 360 ierr = PetscLogEventEnd(TS_FunctionEval,ts,U,Q,0);CHKERRQ(ierr); 361 PetscFunctionReturn(0); 362 } 363 364 /*@C 365 TSComputeDRDUFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian() 366 367 Level: deprecated 368 369 @*/ 370 PetscErrorCode TSComputeDRDUFunction(TS ts,PetscReal t,Vec U,Vec *DRDU) 371 { 372 PetscErrorCode ierr; 373 374 PetscFunctionBegin; 375 if (!DRDU) PetscFunctionReturn(0); 376 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 377 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 378 379 PetscStackPush("TS user DRDU function for sensitivity analysis"); 380 ierr = (*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx); CHKERRQ(ierr); 381 PetscStackPop; 382 PetscFunctionReturn(0); 383 } 384 385 /*@C 386 TSComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP() 387 388 Level: deprecated 389 390 @*/ 391 PetscErrorCode TSComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP) 392 { 393 PetscErrorCode ierr; 394 395 PetscFunctionBegin; 396 if (!DRDP) PetscFunctionReturn(0); 397 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 398 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 399 400 PetscStackPush("TS user DRDP function for sensitivity analysis"); 401 ierr = (*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx); CHKERRQ(ierr); 402 PetscStackPop; 403 
PetscFunctionReturn(0); 404 } 405 406 /*@C 407 TSSetIHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of F (IFunction) w.r.t. the state variable. 408 409 Logically Collective on TS 410 411 Input Parameters: 412 + ts - TS context obtained from TSCreate() 413 . ihp1 - an array of vectors storing the result of vector-Hessian-vector product for F_UU 414 . hessianproductfunc1 - vector-Hessian-vector product function for F_UU 415 . ihp2 - an array of vectors storing the result of vector-Hessian-vector product for F_UP 416 . hessianproductfunc2 - vector-Hessian-vector product function for F_UP 417 . ihp3 - an array of vectors storing the result of vector-Hessian-vector product for F_PU 418 . hessianproductfunc3 - vector-Hessian-vector product function for F_PU 419 . ihp4 - an array of vectors storing the result of vector-Hessian-vector product for F_PP 420 . hessianproductfunc4 - vector-Hessian-vector product function for F_PP 421 422 Calling sequence of ihessianproductfunc: 423 $ ihessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 424 + t - current timestep 425 . U - input vector (current ODE solution) 426 . Vl - an array of input vectors to be left-multiplied with the Hessian 427 . Vr - input vector to be right-multiplied with the Hessian 428 . VHV - an array of output vectors for vector-Hessian-vector product 429 - ctx - [optional] user-defined function context 430 431 Level: intermediate 432 433 Notes: 434 The first Hessian function and the working array are required. 435 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 436 $ Vl_n^T*F_UP*Vr 437 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian F_UP is of size N x N x M. 
438 Each entry of F_UP corresponds to the derivative 439 $ F_UP[i][j][k] = \frac{\partial^2 F[i]}{\partial U[j] \partial P[k]}. 440 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with the j-th entry being 441 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * F_UP[i][j][k] * Vr[k]} 442 If the cost function is a scalar, there will be only one vector in Vl and VHV. 443 444 .seealso: 445 @*/ 446 PetscErrorCode TSSetIHessianProduct(TS ts,Vec *ihp1,PetscErrorCode (*ihessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 447 Vec *ihp2,PetscErrorCode (*ihessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 448 Vec *ihp3,PetscErrorCode (*ihessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 449 Vec *ihp4,PetscErrorCode (*ihessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 450 void *ctx) 451 { 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 454 PetscValidPointer(ihp1,2); 455 456 ts->ihessianproductctx = ctx; 457 if (ihp1) ts->vecs_fuu = ihp1; 458 if (ihp2) ts->vecs_fup = ihp2; 459 if (ihp3) ts->vecs_fpu = ihp3; 460 if (ihp4) ts->vecs_fpp = ihp4; 461 ts->ihessianproduct_fuu = ihessianproductfunc1; 462 ts->ihessianproduct_fup = ihessianproductfunc2; 463 ts->ihessianproduct_fpu = ihessianproductfunc3; 464 ts->ihessianproduct_fpp = ihessianproductfunc4; 465 PetscFunctionReturn(0); 466 } 467 468 /*@C 469 TSComputeIHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Fuu. 470 471 Collective on TS 472 473 Input Parameters: 474 . ts - The TS context obtained from TSCreate() 475 476 Notes: 477 TSComputeIHessianProductFunctionUU() is typically used for sensitivity implementation, 478 so most users would not generally call this routine themselves. 
479 480 Level: developer 481 482 .seealso: TSSetIHessianProduct() 483 @*/ 484 PetscErrorCode TSComputeIHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 485 { 486 PetscErrorCode ierr; 487 488 PetscFunctionBegin; 489 if (!VHV) PetscFunctionReturn(0); 490 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 491 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 492 493 if (ts->ihessianproduct_fuu) { 494 PetscStackPush("TS user IHessianProduct function 1 for sensitivity analysis"); 495 ierr = (*ts->ihessianproduct_fuu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 496 PetscStackPop; 497 } 498 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 499 if (ts->rhshessianproduct_guu) { 500 PetscInt nadj; 501 ierr = TSComputeRHSHessianProductFunctionUU(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 502 for (nadj=0; nadj<ts->numcost; nadj++) { 503 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 504 } 505 } 506 PetscFunctionReturn(0); 507 } 508 509 /*@C 510 TSComputeIHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Fup. 511 512 Collective on TS 513 514 Input Parameters: 515 . ts - The TS context obtained from TSCreate() 516 517 Notes: 518 TSComputeIHessianProductFunctionUP() is typically used for sensitivity implementation, 519 so most users would not generally call this routine themselves. 
520 521 Level: developer 522 523 .seealso: TSSetIHessianProduct() 524 @*/ 525 PetscErrorCode TSComputeIHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 526 { 527 PetscErrorCode ierr; 528 529 PetscFunctionBegin; 530 if (!VHV) PetscFunctionReturn(0); 531 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 532 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 533 534 if (ts->ihessianproduct_fup) { 535 PetscStackPush("TS user IHessianProduct function 2 for sensitivity analysis"); 536 ierr = (*ts->ihessianproduct_fup)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 537 PetscStackPop; 538 } 539 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 540 if (ts->rhshessianproduct_gup) { 541 PetscInt nadj; 542 ierr = TSComputeRHSHessianProductFunctionUP(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 543 for (nadj=0; nadj<ts->numcost; nadj++) { 544 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 545 } 546 } 547 PetscFunctionReturn(0); 548 } 549 550 /*@C 551 TSComputeIHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Fpu. 552 553 Collective on TS 554 555 Input Parameters: 556 . ts - The TS context obtained from TSCreate() 557 558 Notes: 559 TSComputeIHessianProductFunctionPU() is typically used for sensitivity implementation, 560 so most users would not generally call this routine themselves. 
561 562 Level: developer 563 564 .seealso: TSSetIHessianProduct() 565 @*/ 566 PetscErrorCode TSComputeIHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 567 { 568 PetscErrorCode ierr; 569 570 PetscFunctionBegin; 571 if (!VHV) PetscFunctionReturn(0); 572 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 573 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 574 575 if (ts->ihessianproduct_fpu) { 576 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 577 ierr = (*ts->ihessianproduct_fpu)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 578 PetscStackPop; 579 } 580 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 581 if (ts->rhshessianproduct_gpu) { 582 PetscInt nadj; 583 ierr = TSComputeRHSHessianProductFunctionPU(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 584 for (nadj=0; nadj<ts->numcost; nadj++) { 585 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 586 } 587 } 588 PetscFunctionReturn(0); 589 } 590 591 /*@C 592 TSComputeIHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Fpp. 593 594 Collective on TS 595 596 Input Parameters: 597 . ts - The TS context obtained from TSCreate() 598 599 Notes: 600 TSComputeIHessianProductFunctionPP() is typically used for sensitivity implementation, 601 so most users would not generally call this routine themselves. 
602 603 Level: developer 604 605 .seealso: TSSetIHessianProduct() 606 @*/ 607 PetscErrorCode TSComputeIHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 608 { 609 PetscErrorCode ierr; 610 611 PetscFunctionBegin; 612 if (!VHV) PetscFunctionReturn(0); 613 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 614 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 615 616 if (ts->ihessianproduct_fpp) { 617 PetscStackPush("TS user IHessianProduct function 3 for sensitivity analysis"); 618 ierr = (*ts->ihessianproduct_fpp)(ts,t,U,Vl,Vr,VHV,ts->ihessianproductctx);CHKERRQ(ierr); 619 PetscStackPop; 620 } 621 /* does not consider IMEX for now, so either IHessian or RHSHessian will be calculated, using the same output VHV */ 622 if (ts->rhshessianproduct_gpp) { 623 PetscInt nadj; 624 ierr = TSComputeRHSHessianProductFunctionPP(ts,t,U,Vl,Vr,VHV);CHKERRQ(ierr); 625 for (nadj=0; nadj<ts->numcost; nadj++) { 626 ierr = VecScale(VHV[nadj],-1);CHKERRQ(ierr); 627 } 628 } 629 PetscFunctionReturn(0); 630 } 631 632 /*@C 633 TSSetRHSHessianProduct - Sets the function that computes the vector-Hessian-vector product. The Hessian is the second-order derivative of G (RHSFunction) w.r.t. the state variable. 634 635 Logically Collective on TS 636 637 Input Parameters: 638 + ts - TS context obtained from TSCreate() 639 . rhshp1 - an array of vectors storing the result of vector-Hessian-vector product for G_UU 640 . hessianproductfunc1 - vector-Hessian-vector product function for G_UU 641 . rhshp2 - an array of vectors storing the result of vector-Hessian-vector product for G_UP 642 . hessianproductfunc2 - vector-Hessian-vector product function for G_UP 643 . rhshp3 - an array of vectors storing the result of vector-Hessian-vector product for G_PU 644 . hessianproductfunc3 - vector-Hessian-vector product function for G_PU 645 . rhshp4 - an array of vectors storing the result of vector-Hessian-vector product for G_PP 646 . 
hessianproductfunc4 - vector-Hessian-vector product function for G_PP 647 648 Calling sequence of ihessianproductfunc: 649 $ rhshessianproductfunc (TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV,void *ctx); 650 + t - current timestep 651 . U - input vector (current ODE solution) 652 . Vl - an array of input vectors to be left-multiplied with the Hessian 653 . Vr - input vector to be right-multiplied with the Hessian 654 . VHV - an array of output vectors for vector-Hessian-vector product 655 - ctx - [optional] user-defined function context 656 657 Level: intermediate 658 659 Notes: 660 The first Hessian function and the working array are required. 661 As an example to implement the callback functions, the second callback function calculates the vector-Hessian-vector product 662 $ Vl_n^T*G_UP*Vr 663 where the vector Vl_n (n-th element in the array Vl) and Vr are of size N and M respectively, and the Hessian G_UP is of size N x N x M. 664 Each entry of G_UP corresponds to the derivative 665 $ G_UP[i][j][k] = \frac{\partial^2 G[i]}{\partial U[j] \partial P[k]}. 666 The result of the vector-Hessian-vector product for Vl_n needs to be stored in vector VHV_n with j-th entry being 667 $ VHV_n[j] = \sum_i \sum_k {Vl_n[i] * G_UP[i][j][k] * Vr[k]} 668 If the cost function is a scalar, there will be only one vector in Vl and VHV. 
669 670 .seealso: 671 @*/ 672 PetscErrorCode TSSetRHSHessianProduct(TS ts,Vec *rhshp1,PetscErrorCode (*rhshessianproductfunc1)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 673 Vec *rhshp2,PetscErrorCode (*rhshessianproductfunc2)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 674 Vec *rhshp3,PetscErrorCode (*rhshessianproductfunc3)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 675 Vec *rhshp4,PetscErrorCode (*rhshessianproductfunc4)(TS,PetscReal,Vec,Vec*,Vec,Vec*,void*), 676 void *ctx) 677 { 678 PetscFunctionBegin; 679 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 680 PetscValidPointer(rhshp1,2); 681 682 ts->rhshessianproductctx = ctx; 683 if (rhshp1) ts->vecs_guu = rhshp1; 684 if (rhshp2) ts->vecs_gup = rhshp2; 685 if (rhshp3) ts->vecs_gpu = rhshp3; 686 if (rhshp4) ts->vecs_gpp = rhshp4; 687 ts->rhshessianproduct_guu = rhshessianproductfunc1; 688 ts->rhshessianproduct_gup = rhshessianproductfunc2; 689 ts->rhshessianproduct_gpu = rhshessianproductfunc3; 690 ts->rhshessianproduct_gpp = rhshessianproductfunc4; 691 PetscFunctionReturn(0); 692 } 693 694 /*@C 695 TSComputeRHSHessianProductFunctionUU - Runs the user-defined vector-Hessian-vector product function for Guu. 696 697 Collective on TS 698 699 Input Parameters: 700 . ts - The TS context obtained from TSCreate() 701 702 Notes: 703 TSComputeRHSHessianProductFunctionUU() is typically used for sensitivity implementation, 704 so most users would not generally call this routine themselves. 
705 706 Level: developer 707 708 .seealso: TSSetRHSHessianProduct() 709 @*/ 710 PetscErrorCode TSComputeRHSHessianProductFunctionUU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 711 { 712 PetscErrorCode ierr; 713 714 PetscFunctionBegin; 715 if (!VHV) PetscFunctionReturn(0); 716 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 717 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 718 719 PetscStackPush("TS user RHSHessianProduct function 1 for sensitivity analysis"); 720 ierr = (*ts->rhshessianproduct_guu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 721 PetscStackPop; 722 PetscFunctionReturn(0); 723 } 724 725 /*@C 726 TSComputeRHSHessianProductFunctionUP - Runs the user-defined vector-Hessian-vector product function for Gup. 727 728 Collective on TS 729 730 Input Parameters: 731 . ts - The TS context obtained from TSCreate() 732 733 Notes: 734 TSComputeRHSHessianProductFunctionUP() is typically used for sensitivity implementation, 735 so most users would not generally call this routine themselves. 736 737 Level: developer 738 739 .seealso: TSSetRHSHessianProduct() 740 @*/ 741 PetscErrorCode TSComputeRHSHessianProductFunctionUP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 742 { 743 PetscErrorCode ierr; 744 745 PetscFunctionBegin; 746 if (!VHV) PetscFunctionReturn(0); 747 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 748 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 749 750 PetscStackPush("TS user RHSHessianProduct function 2 for sensitivity analysis"); 751 ierr = (*ts->rhshessianproduct_gup)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 752 PetscStackPop; 753 PetscFunctionReturn(0); 754 } 755 756 /*@C 757 TSComputeRHSHessianProductFunctionPU - Runs the user-defined vector-Hessian-vector product function for Gpu. 758 759 Collective on TS 760 761 Input Parameters: 762 . 
ts - The TS context obtained from TSCreate()

  Notes:
    TSComputeRHSHessianProductFunctionPU() is typically used for sensitivity implementation,
    so most users would not generally call this routine themselves.

  Level: developer

.seealso: TSSetRHSHessianProduct()
@*/
PetscErrorCode TSComputeRHSHessianProductFunctionPU(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!VHV) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidHeaderSpecific(U,VEC_CLASSID,3);

  /* delegate directly to the user callback registered via TSSetRHSHessianProduct() */
  PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis");
  ierr = (*ts->rhshessianproduct_gpu)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}

/*@C
  TSComputeRHSHessianProductFunctionPP - Runs the user-defined vector-Hessian-vector product function for Gpp.

  Collective on TS

  Input Parameters:
. ts - The TS context obtained from TSCreate()

  Notes:
    TSComputeRHSHessianProductFunctionPP() is typically used for sensitivity implementation,
    so most users would not generally call this routine themselves.
798 799 Level: developer 800 801 .seealso: TSSetRHSHessianProduct() 802 @*/ 803 PetscErrorCode TSComputeRHSHessianProductFunctionPP(TS ts,PetscReal t,Vec U,Vec *Vl,Vec Vr,Vec *VHV) 804 { 805 PetscErrorCode ierr; 806 807 PetscFunctionBegin; 808 if (!VHV) PetscFunctionReturn(0); 809 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 810 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 811 812 PetscStackPush("TS user RHSHessianProduct function 3 for sensitivity analysis"); 813 ierr = (*ts->rhshessianproduct_gpp)(ts,t,U,Vl,Vr,VHV,ts->rhshessianproductctx);CHKERRQ(ierr); 814 PetscStackPop; 815 PetscFunctionReturn(0); 816 } 817 818 /* --------------------------- Adjoint sensitivity ---------------------------*/ 819 820 /*@ 821 TSSetCostGradients - Sets the initial value of the gradients of the cost function w.r.t. initial values and w.r.t. the problem parameters 822 for use by the TSAdjoint routines. 823 824 Logically Collective on TS 825 826 Input Parameters: 827 + ts - the TS context obtained from TSCreate() 828 . numcost - number of gradients to be computed, this is the number of cost functions 829 . 
lambda - gradients with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
- mu - gradients with respect to the parameters, the number of entries in these vectors is the same as the number of parameters

  Level: beginner

  Notes:
    the entries in these vectors must be correctly initialized with the values lambda_i = df/dy|finaltime mu_i = df/dp|finaltime

    After TSAdjointSolve() is called the lambda and the mu contain the computed sensitivities

.seealso TSGetCostGradients()
@*/
PetscErrorCode TSSetCostGradients(TS ts,PetscInt numcost,Vec *lambda,Vec *mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  PetscValidPointer(lambda,3); /* lambda is argument 3; the original reported position 2 in the error message */
  ts->vecs_sensi  = lambda;
  ts->vecs_sensip = mu;
  /* "2nd parameter": numcost is the second argument of this routine */
  if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2nd parameter of TSSetCostGradients()) is inconsistent with the one set by TSSetCostIntegrand()");
  ts->numcost = numcost;
  PetscFunctionReturn(0);
}

/*@
  TSGetCostGradients - Returns the gradients from the TSAdjointSolve()

  Not Collective, but Vec returned is parallel if TS is parallel

  Input Parameter:
.
ts - the TS context obtained from TSCreate()

  Output Parameters:
+ numcost - number of cost functions
. lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables
- mu - vectors containing the gradients of the cost functions with respect to the problem parameters

  Level: intermediate

.seealso: TSSetCostGradients()
@*/
PetscErrorCode TSGetCostGradients(TS ts,PetscInt *numcost,Vec **lambda,Vec **mu)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ts,TS_CLASSID,1);
  /* all outputs are optional; only fill the ones the caller asked for */
  if (numcost) *numcost = ts->numcost;
  if (lambda)  *lambda  = ts->vecs_sensi;
  if (mu)      *mu      = ts->vecs_sensip;
  PetscFunctionReturn(0);
}

/*@
  TSSetCostHessianProducts - Sets the initial value of the Hessian-vector products of the cost function w.r.t. initial values and w.r.t. the problem parameters
  for use by the TSAdjoint routines.

  Logically Collective on TS

  Input Parameters:
+ ts - the TS context obtained from TSCreate()
. numcost - number of cost functions
. lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector
. mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters
- dir - the direction vector that are multiplied with the Hessian of the cost functions

  Level: beginner

  Notes: Hessian of the cost function is completely different from Hessian of the ODE/DAE system

    For second-order adjoint, one needs to call this function and then TSAdjointSetForward() before TSSolve().

    After TSAdjointSolve() is called, the lambda2 and the mu2 will contain the computed second-order adjoint sensitivities, and can be used to produce Hessian-vector product (not the full Hessian matrix).
Users must provide a direction vector; it is usually generated by an optimization solver. 899 900 Passing NULL for lambda2 disables the second-order calculation. 901 .seealso: TSAdjointSetForward() 902 @*/ 903 PetscErrorCode TSSetCostHessianProducts(TS ts,PetscInt numcost,Vec *lambda2,Vec *mu2,Vec dir) 904 { 905 PetscFunctionBegin; 906 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 907 if (ts->numcost && ts->numcost!=numcost) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2rd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand"); 908 ts->numcost = numcost; 909 ts->vecs_sensi2 = lambda2; 910 ts->vecs_sensi2p = mu2; 911 ts->vec_dir = dir; 912 PetscFunctionReturn(0); 913 } 914 915 /*@ 916 TSGetCostHessianProducts - Returns the gradients from the TSAdjointSolve() 917 918 Not Collective, but Vec returned is parallel if TS is parallel 919 920 Input Parameter: 921 . ts - the TS context obtained from TSCreate() 922 923 Output Parameter: 924 + numcost - number of cost functions 925 . lambda2 - Hessian-vector product with respect to the initial condition variables, the dimension and parallel layout of these vectors is the same as the ODE solution vector 926 . 
mu2 - Hessian-vector product with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 927 - dir - the direction vector that are multiplied with the Hessian of the cost functions 928 929 Level: intermediate 930 931 .seealso: TSSetCostHessianProducts() 932 @*/ 933 PetscErrorCode TSGetCostHessianProducts(TS ts,PetscInt *numcost,Vec **lambda2,Vec **mu2, Vec *dir) 934 { 935 PetscFunctionBegin; 936 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 937 if (numcost) *numcost = ts->numcost; 938 if (lambda2) *lambda2 = ts->vecs_sensi2; 939 if (mu2) *mu2 = ts->vecs_sensi2p; 940 if (dir) *dir = ts->vec_dir; 941 PetscFunctionReturn(0); 942 } 943 944 /*@ 945 TSAdjointSetForward - Trigger the tangent linear solver and initialize the forward sensitivities 946 947 Logically Collective on TS 948 949 Input Parameters: 950 + ts - the TS context obtained from TSCreate() 951 - didp - the derivative of initial values w.r.t. parameters 952 953 Level: intermediate 954 955 Notes: When computing sensitivies w.r.t. initial condition, set didp to NULL so that the solver will take it as an identity matrix mathematically. TSAdjoint does not reset the tangent linear solver automatically, TSAdjointResetForward() should be called to reset the tangent linear solver. 956 957 .seealso: TSSetCostHessianProducts(), TSAdjointResetForward() 958 @*/ 959 PetscErrorCode TSAdjointSetForward(TS ts,Mat didp) 960 { 961 Mat A; 962 Vec sp; 963 PetscScalar *xarr; 964 PetscInt lsize; 965 PetscErrorCode ierr; 966 967 PetscFunctionBegin; 968 ts->forward_solve = PETSC_TRUE; /* turn on tangent linear mode */ 969 if (!ts->vecs_sensi2) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Must call TSSetCostHessianProducts() first"); 970 if (!ts->vec_dir) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"Directional vector is missing. 
Call TSSetCostHessianProducts() to set it."); 971 /* create a single-column dense matrix */ 972 ierr = VecGetLocalSize(ts->vec_sol,&lsize);CHKERRQ(ierr); 973 ierr = MatCreateDense(PetscObjectComm((PetscObject)ts),lsize,PETSC_DECIDE,PETSC_DECIDE,1,NULL,&A);CHKERRQ(ierr); 974 975 ierr = VecDuplicate(ts->vec_sol,&sp);CHKERRQ(ierr); 976 ierr = MatDenseGetColumn(A,0,&xarr);CHKERRQ(ierr); 977 ierr = VecPlaceArray(sp,xarr);CHKERRQ(ierr); 978 if (ts->vecs_sensi2p) { /* tangent linear variable initialized as 2*dIdP*dir */ 979 if (didp) { 980 ierr = MatMult(didp,ts->vec_dir,sp);CHKERRQ(ierr); 981 ierr = VecScale(sp,2.);CHKERRQ(ierr); 982 } else { 983 ierr = VecZeroEntries(sp);CHKERRQ(ierr); 984 } 985 } else { /* tangent linear variable initialized as dir */ 986 ierr = VecCopy(ts->vec_dir,sp);CHKERRQ(ierr); 987 } 988 ierr = VecResetArray(sp);CHKERRQ(ierr); 989 ierr = MatDenseRestoreColumn(A,&xarr);CHKERRQ(ierr); 990 ierr = VecDestroy(&sp);CHKERRQ(ierr); 991 992 ierr = TSForwardSetInitialSensitivities(ts,A);CHKERRQ(ierr); /* if didp is NULL, identity matrix is assumed */ 993 994 ierr = MatDestroy(&A);CHKERRQ(ierr); 995 PetscFunctionReturn(0); 996 } 997 998 /*@ 999 TSAdjointResetForward - Reset the tangent linear solver and destroy the tangent linear context 1000 1001 Logically Collective on TS 1002 1003 Input Parameters: 1004 . ts - the TS context obtained from TSCreate() 1005 1006 Level: intermediate 1007 1008 .seealso: TSAdjointSetForward() 1009 @*/ 1010 PetscErrorCode TSAdjointResetForward(TS ts) 1011 { 1012 PetscErrorCode ierr; 1013 1014 PetscFunctionBegin; 1015 ts->forward_solve = PETSC_FALSE; /* turn off tangent linear mode */ 1016 ierr = TSForwardReset(ts);CHKERRQ(ierr); 1017 PetscFunctionReturn(0); 1018 } 1019 1020 /*@ 1021 TSAdjointSetUp - Sets up the internal data structures for the later use 1022 of an adjoint solver 1023 1024 Collective on TS 1025 1026 Input Parameter: 1027 . 
ts - the TS context obtained from TSCreate() 1028 1029 Level: advanced 1030 1031 .seealso: TSCreate(), TSAdjointStep(), TSSetCostGradients() 1032 @*/ 1033 PetscErrorCode TSAdjointSetUp(TS ts) 1034 { 1035 TSTrajectory tj; 1036 PetscBool match; 1037 PetscErrorCode ierr; 1038 1039 PetscFunctionBegin; 1040 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1041 if (ts->adjointsetupcalled) PetscFunctionReturn(0); 1042 if (!ts->vecs_sensi) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetCostGradients() first"); 1043 if (ts->vecs_sensip && !ts->Jacp && !ts->Jacprhs) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_WRONGSTATE,"Must call TSSetRHSJacobianP() or TSSetIJacobianP() first"); 1044 ierr = TSGetTrajectory(ts,&tj);CHKERRQ(ierr); 1045 ierr = PetscObjectTypeCompare((PetscObject)tj,TSTRAJECTORYBASIC,&match);CHKERRQ(ierr); 1046 if (match) { 1047 PetscBool solution_only; 1048 ierr = TSTrajectoryGetSolutionOnly(tj,&solution_only);CHKERRQ(ierr); 1049 if (solution_only) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"TSAdjoint cannot use the solution-only mode when choosing the Basic TSTrajectory type. Turn it off with -ts_trajectory_solution_only 0"); 1050 } 1051 ierr = TSTrajectorySetUseHistory(tj,PETSC_FALSE);CHKERRQ(ierr); /* not use TSHistory */ 1052 1053 if (ts->quadraturets) { /* if there is integral in the cost function */ 1054 ierr = VecDuplicate(ts->vecs_sensi[0],&ts->vec_drdu_col);CHKERRQ(ierr); 1055 if (ts->vecs_sensip){ 1056 ierr = VecDuplicate(ts->vecs_sensip[0],&ts->vec_drdp_col);CHKERRQ(ierr); 1057 } 1058 } 1059 1060 if (ts->ops->adjointsetup) { 1061 ierr = (*ts->ops->adjointsetup)(ts);CHKERRQ(ierr); 1062 } 1063 ts->adjointsetupcalled = PETSC_TRUE; 1064 PetscFunctionReturn(0); 1065 } 1066 1067 /*@ 1068 TSAdjointReset - Resets a TSAdjoint context and removes any allocated Vecs and Mats. 1069 1070 Collective on TS 1071 1072 Input Parameter: 1073 . 
ts - the TS context obtained from TSCreate() 1074 1075 Level: beginner 1076 1077 .seealso: TSCreate(), TSAdjointSetUp(), TSADestroy() 1078 @*/ 1079 PetscErrorCode TSAdjointReset(TS ts) 1080 { 1081 PetscErrorCode ierr; 1082 1083 PetscFunctionBegin; 1084 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1085 if (ts->ops->adjointreset) { 1086 ierr = (*ts->ops->adjointreset)(ts);CHKERRQ(ierr); 1087 } 1088 if (ts->quadraturets) { /* if there is integral in the cost function */ 1089 ierr = VecDestroy(&ts->vec_drdu_col);CHKERRQ(ierr); 1090 if (ts->vecs_sensip){ 1091 ierr = VecDestroy(&ts->vec_drdp_col);CHKERRQ(ierr); 1092 } 1093 } 1094 ts->vecs_sensi = NULL; 1095 ts->vecs_sensip = NULL; 1096 ts->vecs_sensi2 = NULL; 1097 ts->vecs_sensi2p = NULL; 1098 ts->vec_dir = NULL; 1099 ts->adjointsetupcalled = PETSC_FALSE; 1100 PetscFunctionReturn(0); 1101 } 1102 1103 /*@ 1104 TSAdjointSetSteps - Sets the number of steps the adjoint solver should take backward in time 1105 1106 Logically Collective on TS 1107 1108 Input Parameters: 1109 + ts - the TS context obtained from TSCreate() 1110 - steps - number of steps to use 1111 1112 Level: intermediate 1113 1114 Notes: 1115 Normally one does not call this and TSAdjointSolve() integrates back to the original timestep. 
One can call this 1116 so as to integrate back to less than the original timestep 1117 1118 .seealso: TSSetExactFinalTime() 1119 @*/ 1120 PetscErrorCode TSAdjointSetSteps(TS ts,PetscInt steps) 1121 { 1122 PetscFunctionBegin; 1123 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1124 PetscValidLogicalCollectiveInt(ts,steps,2); 1125 if (steps < 0) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back a negative number of steps"); 1126 if (steps > ts->steps) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_ARG_OUTOFRANGE,"Cannot step back more than the total number of forward steps"); 1127 ts->adjoint_max_steps = steps; 1128 PetscFunctionReturn(0); 1129 } 1130 1131 /*@C 1132 TSAdjointSetRHSJacobian - Deprecated, use TSSetRHSJacobianP() 1133 1134 Level: deprecated 1135 1136 @*/ 1137 PetscErrorCode TSAdjointSetRHSJacobian(TS ts,Mat Amat,PetscErrorCode (*func)(TS,PetscReal,Vec,Mat,void*),void *ctx) 1138 { 1139 PetscErrorCode ierr; 1140 1141 PetscFunctionBegin; 1142 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1143 PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 1144 1145 ts->rhsjacobianp = func; 1146 ts->rhsjacobianpctx = ctx; 1147 if(Amat) { 1148 ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr); 1149 ierr = MatDestroy(&ts->Jacp);CHKERRQ(ierr); 1150 ts->Jacp = Amat; 1151 } 1152 PetscFunctionReturn(0); 1153 } 1154 1155 /*@C 1156 TSAdjointComputeRHSJacobian - Deprecated, use TSComputeRHSJacobianP() 1157 1158 Level: deprecated 1159 1160 @*/ 1161 PetscErrorCode TSAdjointComputeRHSJacobian(TS ts,PetscReal t,Vec U,Mat Amat) 1162 { 1163 PetscErrorCode ierr; 1164 1165 PetscFunctionBegin; 1166 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1167 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1168 PetscValidPointer(Amat,4); 1169 1170 PetscStackPush("TS user JacobianP function for sensitivity analysis"); 1171 ierr = (*ts->rhsjacobianp)(ts,t,U,Amat,ts->rhsjacobianpctx); CHKERRQ(ierr); 1172 PetscStackPop; 1173 PetscFunctionReturn(0); 1174 } 1175 1176 /*@ 
1177 TSAdjointComputeDRDYFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobian() 1178 1179 Level: deprecated 1180 1181 @*/ 1182 PetscErrorCode TSAdjointComputeDRDYFunction(TS ts,PetscReal t,Vec U,Vec *DRDU) 1183 { 1184 PetscErrorCode ierr; 1185 1186 PetscFunctionBegin; 1187 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1188 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1189 1190 PetscStackPush("TS user DRDY function for sensitivity analysis"); 1191 ierr = (*ts->drdufunction)(ts,t,U,DRDU,ts->costintegrandctx); CHKERRQ(ierr); 1192 PetscStackPop; 1193 PetscFunctionReturn(0); 1194 } 1195 1196 /*@ 1197 TSAdjointComputeDRDPFunction - Deprecated, use TSGetQuadratureTS() then TSComputeRHSJacobianP() 1198 1199 Level: deprecated 1200 1201 @*/ 1202 PetscErrorCode TSAdjointComputeDRDPFunction(TS ts,PetscReal t,Vec U,Vec *DRDP) 1203 { 1204 PetscErrorCode ierr; 1205 1206 PetscFunctionBegin; 1207 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1208 PetscValidHeaderSpecific(U,VEC_CLASSID,3); 1209 1210 PetscStackPush("TS user DRDP function for sensitivity analysis"); 1211 ierr = (*ts->drdpfunction)(ts,t,U,DRDP,ts->costintegrandctx); CHKERRQ(ierr); 1212 PetscStackPop; 1213 PetscFunctionReturn(0); 1214 } 1215 1216 /*@C 1217 TSAdjointMonitorSensi - monitors the first lambda sensitivity 1218 1219 Level: intermediate 1220 1221 .seealso: TSAdjointMonitorSet() 1222 @*/ 1223 PetscErrorCode TSAdjointMonitorSensi(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf) 1224 { 1225 PetscErrorCode ierr; 1226 PetscViewer viewer = vf->viewer; 1227 1228 PetscFunctionBegin; 1229 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,4); 1230 ierr = PetscViewerPushFormat(viewer,vf->format);CHKERRQ(ierr); 1231 ierr = VecView(lambda[0],viewer);CHKERRQ(ierr); 1232 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1233 PetscFunctionReturn(0); 1234 } 1235 1236 /*@C 1237 TSAdjointMonitorSetFromOptions - Sets a monitor function and viewer appropriate 
for the type indicated by the user

   Collective on TS

   Input Parameters:
+  ts - TS object you wish to monitor
.  name - the monitor type one is seeking
.  help - message indicating what monitoring is done
.  manual - manual page for the monitor
.  monitor - the monitor function
-  monitorsetup - a function that is called once ONLY if the user selected this monitor that may set additional features of the TS or PetscViewer objects

   Level: developer

.seealso: PetscOptionsGetViewer(), PetscOptionsGetReal(), PetscOptionsHasName(), PetscOptionsGetString(),
          PetscOptionsGetIntArray(), PetscOptionsGetRealArray(), PetscOptionsBool()
          PetscOptionsInt(), PetscOptionsString(), PetscOptionsReal(), PetscOptionsBool(),
          PetscOptionsName(), PetscOptionsBegin(), PetscOptionsEnd(), PetscOptionsHead(),
          PetscOptionsStringArray(),PetscOptionsRealArray(), PetscOptionsScalar(),
          PetscOptionsBoolGroupBegin(), PetscOptionsBoolGroup(), PetscOptionsBoolGroupEnd(),
          PetscOptionsFList(), PetscOptionsEList()
@*/
PetscErrorCode TSAdjointMonitorSetFromOptions(TS ts,const char name[],const char help[], const char manual[],PetscErrorCode (*monitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,PetscViewerAndFormat*),PetscErrorCode (*monitorsetup)(TS,PetscViewerAndFormat*))
{
  PetscErrorCode    ierr;
  PetscViewer       viewer;
  PetscViewerFormat format;
  PetscBool         flg;

  PetscFunctionBegin;
  ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)ts),((PetscObject)ts)->options,((PetscObject)ts)->prefix,name,&viewer,&format,&flg);CHKERRQ(ierr);
  if (flg) {
    PetscViewerAndFormat *vf;

    ierr = PetscViewerAndFormatCreate(viewer,format,&vf);CHKERRQ(ierr);
    /* vf now holds its own reference to the viewer; drop the one returned by PetscOptionsGetViewer() */
    ierr = PetscObjectDereference((PetscObject)viewer);CHKERRQ(ierr);
    if (monitorsetup) {
      ierr = (*monitorsetup)(ts,vf);CHKERRQ(ierr);
    }
    ierr = TSAdjointMonitorSet(ts,(PetscErrorCode (*)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*))monitor,vf,(PetscErrorCode (*)(void**))PetscViewerAndFormatDestroy);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   TSAdjointMonitorSet - Sets an ADDITIONAL function that is to be used at every
   timestep to display the iteration's progress.

   Logically Collective on TS

   Input Parameters:
+  ts - the TS context obtained from TSCreate()
.  adjointmonitor - monitoring routine
.  adjointmctx - [optional] user-defined context for private data for the
                 monitor routine (use NULL if no context is desired)
-  adjointmonitordestroy - [optional] routine that frees monitor context
                           (may be NULL)

   Calling sequence of monitor:
$    int adjointmonitor(TS ts,PetscInt steps,PetscReal time,Vec u,PetscInt numcost,Vec *lambda, Vec *mu,void *adjointmctx)

+    ts - the TS context
.    steps - iteration number (after the final time step the monitor routine is called with a step of -1, this is at the final time which may have
             been interpolated to)
.    time - current time
.    u - current iterate
.    numcost - number of cost functions
.    lambda - sensitivities to initial conditions
.    mu - sensitivities to parameters
-    adjointmctx - [optional] adjoint monitoring context

   Notes:
   This routine adds an additional monitor to the list of monitors that
   already has been loaded.
1310 1311 Fortran Notes: 1312 Only a single monitor function can be set for each TS object 1313 1314 Level: intermediate 1315 1316 .seealso: TSAdjointMonitorCancel() 1317 @*/ 1318 PetscErrorCode TSAdjointMonitorSet(TS ts,PetscErrorCode (*adjointmonitor)(TS,PetscInt,PetscReal,Vec,PetscInt,Vec*,Vec*,void*),void *adjointmctx,PetscErrorCode (*adjointmdestroy)(void**)) 1319 { 1320 PetscErrorCode ierr; 1321 PetscInt i; 1322 PetscBool identical; 1323 1324 PetscFunctionBegin; 1325 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1326 for (i=0; i<ts->numbermonitors;i++) { 1327 ierr = PetscMonitorCompare((PetscErrorCode (*)(void))adjointmonitor,adjointmctx,adjointmdestroy,(PetscErrorCode (*)(void))ts->adjointmonitor[i],ts->adjointmonitorcontext[i],ts->adjointmonitordestroy[i],&identical);CHKERRQ(ierr); 1328 if (identical) PetscFunctionReturn(0); 1329 } 1330 if (ts->numberadjointmonitors >= MAXTSMONITORS) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many adjoint monitors set"); 1331 ts->adjointmonitor[ts->numberadjointmonitors] = adjointmonitor; 1332 ts->adjointmonitordestroy[ts->numberadjointmonitors] = adjointmdestroy; 1333 ts->adjointmonitorcontext[ts->numberadjointmonitors++] = (void*)adjointmctx; 1334 PetscFunctionReturn(0); 1335 } 1336 1337 /*@C 1338 TSAdjointMonitorCancel - Clears all the adjoint monitors that have been set on a time-step object. 1339 1340 Logically Collective on TS 1341 1342 Input Parameters: 1343 . ts - the TS context obtained from TSCreate() 1344 1345 Notes: 1346 There is no way to remove a single, specific monitor. 
1347 1348 Level: intermediate 1349 1350 .seealso: TSAdjointMonitorSet() 1351 @*/ 1352 PetscErrorCode TSAdjointMonitorCancel(TS ts) 1353 { 1354 PetscErrorCode ierr; 1355 PetscInt i; 1356 1357 PetscFunctionBegin; 1358 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1359 for (i=0; i<ts->numberadjointmonitors; i++) { 1360 if (ts->adjointmonitordestroy[i]) { 1361 ierr = (*ts->adjointmonitordestroy[i])(&ts->adjointmonitorcontext[i]);CHKERRQ(ierr); 1362 } 1363 } 1364 ts->numberadjointmonitors = 0; 1365 PetscFunctionReturn(0); 1366 } 1367 1368 /*@C 1369 TSAdjointMonitorDefault - the default monitor of adjoint computations 1370 1371 Level: intermediate 1372 1373 .seealso: TSAdjointMonitorSet() 1374 @*/ 1375 PetscErrorCode TSAdjointMonitorDefault(TS ts,PetscInt step,PetscReal ptime,Vec v,PetscInt numcost,Vec *lambda,Vec *mu,PetscViewerAndFormat *vf) 1376 { 1377 PetscErrorCode ierr; 1378 PetscViewer viewer = vf->viewer; 1379 1380 PetscFunctionBegin; 1381 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,4); 1382 ierr = PetscViewerPushFormat(viewer,vf->format);CHKERRQ(ierr); 1383 ierr = PetscViewerASCIIAddTab(viewer,((PetscObject)ts)->tablevel);CHKERRQ(ierr); 1384 ierr = PetscViewerASCIIPrintf(viewer,"%D TS dt %g time %g%s",step,(double)ts->time_step,(double)ptime,ts->steprollback ? " (r)\n" : "\n");CHKERRQ(ierr); 1385 ierr = PetscViewerASCIISubtractTab(viewer,((PetscObject)ts)->tablevel);CHKERRQ(ierr); 1386 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 1387 PetscFunctionReturn(0); 1388 } 1389 1390 /*@C 1391 TSAdjointMonitorDrawSensi - Monitors progress of the adjoint TS solvers by calling 1392 VecView() for the sensitivities to initial states at each timestep 1393 1394 Collective on TS 1395 1396 Input Parameters: 1397 + ts - the TS context 1398 . step - current time-step 1399 . ptime - current time 1400 . u - current state 1401 . numcost - number of cost functions 1402 . lambda - sensitivities to initial conditions 1403 . 
mu - sensitivities to parameters 1404 - dummy - either a viewer or NULL 1405 1406 Level: intermediate 1407 1408 .seealso: TSAdjointMonitorSet(), TSAdjointMonitorDefault(), VecView() 1409 @*/ 1410 PetscErrorCode TSAdjointMonitorDrawSensi(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda,Vec *mu,void *dummy) 1411 { 1412 PetscErrorCode ierr; 1413 TSMonitorDrawCtx ictx = (TSMonitorDrawCtx)dummy; 1414 PetscDraw draw; 1415 PetscReal xl,yl,xr,yr,h; 1416 char time[32]; 1417 1418 PetscFunctionBegin; 1419 if (!(((ictx->howoften > 0) && (!(step % ictx->howoften))) || ((ictx->howoften == -1) && ts->reason))) PetscFunctionReturn(0); 1420 1421 ierr = VecView(lambda[0],ictx->viewer);CHKERRQ(ierr); 1422 ierr = PetscViewerDrawGetDraw(ictx->viewer,0,&draw);CHKERRQ(ierr); 1423 ierr = PetscSNPrintf(time,32,"Timestep %d Time %g",(int)step,(double)ptime);CHKERRQ(ierr); 1424 ierr = PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);CHKERRQ(ierr); 1425 h = yl + .95*(yr - yl); 1426 ierr = PetscDrawStringCentered(draw,.5*(xl+xr),h,PETSC_DRAW_BLACK,time);CHKERRQ(ierr); 1427 ierr = PetscDrawFlush(draw);CHKERRQ(ierr); 1428 PetscFunctionReturn(0); 1429 } 1430 1431 /* 1432 TSAdjointSetFromOptions - Sets various TSAdjoint parameters from user options. 1433 1434 Collective on TSAdjoint 1435 1436 Input Parameter: 1437 . ts - the TS context 1438 1439 Options Database Keys: 1440 + -ts_adjoint_solve <yes,no> After solving the ODE/DAE solve the adjoint problem (requires -ts_save_trajectory) 1441 . 
-ts_adjoint_monitor - print information at each adjoint time step 1442 - -ts_adjoint_monitor_draw_sensi - monitor the sensitivity of the first cost function wrt initial conditions (lambda[0]) graphically 1443 1444 Level: developer 1445 1446 Notes: 1447 This is not normally called directly by users 1448 1449 .seealso: TSSetSaveTrajectory(), TSTrajectorySetUp() 1450 */ 1451 PetscErrorCode TSAdjointSetFromOptions(PetscOptionItems *PetscOptionsObject,TS ts) 1452 { 1453 PetscBool tflg,opt; 1454 PetscErrorCode ierr; 1455 1456 PetscFunctionBegin; 1457 PetscValidHeaderSpecific(ts,TS_CLASSID,2); 1458 ierr = PetscOptionsHead(PetscOptionsObject,"TS Adjoint options");CHKERRQ(ierr); 1459 tflg = ts->adjoint_solve ? PETSC_TRUE : PETSC_FALSE; 1460 ierr = PetscOptionsBool("-ts_adjoint_solve","Solve the adjoint problem immediately after solving the forward problem","",tflg,&tflg,&opt);CHKERRQ(ierr); 1461 if (opt) { 1462 ierr = TSSetSaveTrajectory(ts);CHKERRQ(ierr); 1463 ts->adjoint_solve = tflg; 1464 } 1465 ierr = TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor","Monitor adjoint timestep size","TSAdjointMonitorDefault",TSAdjointMonitorDefault,NULL);CHKERRQ(ierr); 1466 ierr = TSAdjointMonitorSetFromOptions(ts,"-ts_adjoint_monitor_sensi","Monitor sensitivity in the adjoint computation","TSAdjointMonitorSensi",TSAdjointMonitorSensi,NULL);CHKERRQ(ierr); 1467 opt = PETSC_FALSE; 1468 ierr = PetscOptionsName("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",&opt);CHKERRQ(ierr); 1469 if (opt) { 1470 TSMonitorDrawCtx ctx; 1471 PetscInt howoften = 1; 1472 1473 ierr = PetscOptionsInt("-ts_adjoint_monitor_draw_sensi","Monitor adjoint sensitivities (lambda only) graphically","TSAdjointMonitorDrawSensi",howoften,&howoften,NULL);CHKERRQ(ierr); 1474 ierr = TSMonitorDrawCtxCreate(PetscObjectComm((PetscObject)ts),0,0,PETSC_DECIDE,PETSC_DECIDE,300,300,howoften,&ctx);CHKERRQ(ierr); 1475 ierr = 
TSAdjointMonitorSet(ts,TSAdjointMonitorDrawSensi,ctx,(PetscErrorCode (*)(void**))TSMonitorDrawCtxDestroy);CHKERRQ(ierr); 1476 } 1477 PetscFunctionReturn(0); 1478 } 1479 1480 /*@ 1481 TSAdjointStep - Steps one time step backward in the adjoint run 1482 1483 Collective on TS 1484 1485 Input Parameter: 1486 . ts - the TS context obtained from TSCreate() 1487 1488 Level: intermediate 1489 1490 .seealso: TSAdjointSetUp(), TSAdjointSolve() 1491 @*/ 1492 PetscErrorCode TSAdjointStep(TS ts) 1493 { 1494 DM dm; 1495 PetscErrorCode ierr; 1496 1497 PetscFunctionBegin; 1498 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1499 ierr = TSGetDM(ts,&dm);CHKERRQ(ierr); 1500 ierr = TSAdjointSetUp(ts);CHKERRQ(ierr); 1501 1502 ts->reason = TS_CONVERGED_ITERATING; 1503 ts->ptime_prev = ts->ptime; 1504 if (!ts->ops->adjointstep) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSStep has failed because the adjoint of %s has not been implemented, try other time stepping methods for adjoint sensitivity analysis",((PetscObject)ts)->type_name); 1505 ierr = PetscLogEventBegin(TS_AdjointStep,ts,0,0,0);CHKERRQ(ierr); 1506 ierr = (*ts->ops->adjointstep)(ts);CHKERRQ(ierr); 1507 ierr = PetscLogEventEnd(TS_AdjointStep,ts,0,0,0);CHKERRQ(ierr); 1508 ts->adjoint_steps++; ts->steps--; 1509 1510 if (ts->reason < 0) { 1511 if (ts->errorifstepfailed) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSAdjointStep has failed due to %s",TSConvergedReasons[ts->reason]); 1512 } else if (!ts->reason) { 1513 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1514 } 1515 PetscFunctionReturn(0); 1516 } 1517 1518 /*@ 1519 TSAdjointSolve - Solves the discrete ajoint problem for an ODE/DAE 1520 1521 Collective on TS 1522 1523 Input Parameter: 1524 . ts - the TS context obtained from TSCreate() 1525 1526 Options Database: 1527 . 
-ts_adjoint_view_solution <viewerinfo> - views the first gradient with respect to the initial values 1528 1529 Level: intermediate 1530 1531 Notes: 1532 This must be called after a call to TSSolve() that solves the forward problem 1533 1534 By default this will integrate back to the initial time, one can use TSAdjointSetSteps() to step back to a later time 1535 1536 .seealso: TSCreate(), TSSetCostGradients(), TSSetSolution(), TSAdjointStep() 1537 @*/ 1538 PetscErrorCode TSAdjointSolve(TS ts) 1539 { 1540 #if defined(TSADJOINT_STAGE) 1541 PetscLogStage adjoint_stage; 1542 #endif 1543 PetscErrorCode ierr; 1544 1545 PetscFunctionBegin; 1546 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1547 #if defined(TSADJOINT_STAGE) 1548 ierr = PetscLogStageRegister("TSAdjoint",&adjoint_stage);CHKERRQ(ierr); 1549 ierr = PetscLogStagePush(adjoint_stage);CHKERRQ(ierr); 1550 #endif 1551 ierr = TSAdjointSetUp(ts);CHKERRQ(ierr); 1552 1553 /* reset time step and iteration counters */ 1554 ts->adjoint_steps = 0; 1555 ts->ksp_its = 0; 1556 ts->snes_its = 0; 1557 ts->num_snes_failures = 0; 1558 ts->reject = 0; 1559 ts->reason = TS_CONVERGED_ITERATING; 1560 1561 if (!ts->adjoint_max_steps) ts->adjoint_max_steps = ts->steps; 1562 if (ts->adjoint_steps >= ts->adjoint_max_steps) ts->reason = TS_CONVERGED_ITS; 1563 1564 while (!ts->reason) { 1565 ierr = TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime);CHKERRQ(ierr); 1566 ierr = TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip);CHKERRQ(ierr); 1567 ierr = TSAdjointEventHandler(ts);CHKERRQ(ierr); 1568 ierr = TSAdjointStep(ts);CHKERRQ(ierr); 1569 if ((ts->vec_costintegral || ts->quadraturets) && !ts->costintegralfwd) { 1570 ierr = TSAdjointCostIntegral(ts);CHKERRQ(ierr); 1571 } 1572 } 1573 ierr = TSTrajectoryGet(ts->trajectory,ts,ts->steps,&ts->ptime);CHKERRQ(ierr); 1574 ierr = TSAdjointMonitor(ts,ts->steps,ts->ptime,ts->vec_sol,ts->numcost,ts->vecs_sensi,ts->vecs_sensip);CHKERRQ(ierr); 1575 
ts->solvetime = ts->ptime; 1576 ierr = TSTrajectoryViewFromOptions(ts->trajectory,NULL,"-ts_trajectory_view");CHKERRQ(ierr); 1577 ierr = VecViewFromOptions(ts->vecs_sensi[0],(PetscObject) ts, "-ts_adjoint_view_solution");CHKERRQ(ierr); 1578 ts->adjoint_max_steps = 0; 1579 #if defined(TSADJOINT_STAGE) 1580 ierr = PetscLogStagePop();CHKERRQ(ierr); 1581 #endif 1582 PetscFunctionReturn(0); 1583 } 1584 1585 /*@C 1586 TSAdjointMonitor - Runs all user-provided adjoint monitor routines set using TSAdjointMonitorSet() 1587 1588 Collective on TS 1589 1590 Input Parameters: 1591 + ts - time stepping context obtained from TSCreate() 1592 . step - step number that has just completed 1593 . ptime - model time of the state 1594 . u - state at the current model time 1595 . numcost - number of cost functions (dimension of lambda or mu) 1596 . lambda - vectors containing the gradients of the cost functions with respect to the ODE/DAE solution variables 1597 - mu - vectors containing the gradients of the cost functions with respect to the problem parameters 1598 1599 Notes: 1600 TSAdjointMonitor() is typically used automatically within the time stepping implementations. 1601 Users would almost never call this routine directly. 1602 1603 Level: developer 1604 1605 @*/ 1606 PetscErrorCode TSAdjointMonitor(TS ts,PetscInt step,PetscReal ptime,Vec u,PetscInt numcost,Vec *lambda, Vec *mu) 1607 { 1608 PetscErrorCode ierr; 1609 PetscInt i,n = ts->numberadjointmonitors; 1610 1611 PetscFunctionBegin; 1612 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1613 PetscValidHeaderSpecific(u,VEC_CLASSID,4); 1614 ierr = VecLockReadPush(u);CHKERRQ(ierr); 1615 for (i=0; i<n; i++) { 1616 ierr = (*ts->adjointmonitor[i])(ts,step,ptime,u,numcost,lambda,mu,ts->adjointmonitorcontext[i]);CHKERRQ(ierr); 1617 } 1618 ierr = VecLockReadPop(u);CHKERRQ(ierr); 1619 PetscFunctionReturn(0); 1620 } 1621 1622 /*@ 1623 TSAdjointCostIntegral - Evaluate the cost integral in the adjoint run. 
1624 1625 Collective on TS 1626 1627 Input Arguments: 1628 . ts - time stepping context 1629 1630 Level: advanced 1631 1632 Notes: 1633 This function cannot be called until TSAdjointStep() has been completed. 1634 1635 .seealso: TSAdjointSolve(), TSAdjointStep 1636 @*/ 1637 PetscErrorCode TSAdjointCostIntegral(TS ts) 1638 { 1639 PetscErrorCode ierr; 1640 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1641 if (!ts->ops->adjointintegral) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the adjoint run",((PetscObject)ts)->type_name); 1642 ierr = (*ts->ops->adjointintegral)(ts);CHKERRQ(ierr); 1643 PetscFunctionReturn(0); 1644 } 1645 1646 /* ------------------ Forward (tangent linear) sensitivity ------------------*/ 1647 1648 /*@ 1649 TSForwardSetUp - Sets up the internal data structures for the later use 1650 of forward sensitivity analysis 1651 1652 Collective on TS 1653 1654 Input Parameter: 1655 . ts - the TS context obtained from TSCreate() 1656 1657 Level: advanced 1658 1659 .seealso: TSCreate(), TSDestroy(), TSSetUp() 1660 @*/ 1661 PetscErrorCode TSForwardSetUp(TS ts) 1662 { 1663 PetscErrorCode ierr; 1664 1665 PetscFunctionBegin; 1666 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1667 if (ts->forwardsetupcalled) PetscFunctionReturn(0); 1668 if (ts->ops->forwardsetup) { 1669 ierr = (*ts->ops->forwardsetup)(ts);CHKERRQ(ierr); 1670 } 1671 ierr = VecDuplicate(ts->vec_sol,&ts->vec_sensip_col);CHKERRQ(ierr); 1672 ts->forwardsetupcalled = PETSC_TRUE; 1673 PetscFunctionReturn(0); 1674 } 1675 1676 /*@ 1677 TSForwardReset - Reset the internal data structures used by forward sensitivity analysis 1678 1679 Collective on TS 1680 1681 Input Parameter: 1682 . 
ts - the TS context obtained from TSCreate() 1683 1684 Level: advanced 1685 1686 .seealso: TSCreate(), TSDestroy(), TSForwardSetUp() 1687 @*/ 1688 PetscErrorCode TSForwardReset(TS ts) 1689 { 1690 TS quadts = ts->quadraturets; 1691 PetscErrorCode ierr; 1692 1693 PetscFunctionBegin; 1694 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1695 if (ts->ops->forwardreset) { 1696 ierr = (*ts->ops->forwardreset)(ts);CHKERRQ(ierr); 1697 } 1698 ierr = MatDestroy(&ts->mat_sensip);CHKERRQ(ierr); 1699 if (quadts) { 1700 ierr = MatDestroy(&quadts->mat_sensip);CHKERRQ(ierr); 1701 } 1702 ierr = VecDestroy(&ts->vec_sensip_col);CHKERRQ(ierr); 1703 ts->forward_solve = PETSC_FALSE; 1704 ts->forwardsetupcalled = PETSC_FALSE; 1705 PetscFunctionReturn(0); 1706 } 1707 1708 /*@ 1709 TSForwardSetIntegralGradients - Set the vectors holding forward sensitivities of the integral term. 1710 1711 Input Parameter: 1712 + ts- the TS context obtained from TSCreate() 1713 . numfwdint- number of integrals 1714 - vp = the vectors containing the gradients for each integral w.r.t. parameters 1715 1716 Level: deprecated 1717 1718 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1719 @*/ 1720 PetscErrorCode TSForwardSetIntegralGradients(TS ts,PetscInt numfwdint,Vec *vp) 1721 { 1722 PetscFunctionBegin; 1723 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1724 if (ts->numcost && ts->numcost!=numfwdint) SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_USER,"The number of cost functions (2rd parameter of TSSetCostIntegrand()) is inconsistent with the one set by TSSetCostIntegrand()"); 1725 if (!ts->numcost) ts->numcost = numfwdint; 1726 1727 ts->vecs_integral_sensip = vp; 1728 PetscFunctionReturn(0); 1729 } 1730 1731 /*@ 1732 TSForwardGetIntegralGradients - Returns the forward sensitivities ofthe integral term. 1733 1734 Input Parameter: 1735 . ts- the TS context obtained from TSCreate() 1736 1737 Output Parameter: 1738 . 
vp = the vectors containing the gradients for each integral w.r.t. parameters 1739 1740 Level: deprecated 1741 1742 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1743 @*/ 1744 PetscErrorCode TSForwardGetIntegralGradients(TS ts,PetscInt *numfwdint,Vec **vp) 1745 { 1746 PetscFunctionBegin; 1747 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1748 PetscValidPointer(vp,3); 1749 if (numfwdint) *numfwdint = ts->numcost; 1750 if (vp) *vp = ts->vecs_integral_sensip; 1751 PetscFunctionReturn(0); 1752 } 1753 1754 /*@ 1755 TSForwardStep - Compute the forward sensitivity for one time step. 1756 1757 Collective on TS 1758 1759 Input Arguments: 1760 . ts - time stepping context 1761 1762 Level: advanced 1763 1764 Notes: 1765 This function cannot be called until TSStep() has been completed. 1766 1767 .seealso: TSForwardSetSensitivities(), TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardSetUp() 1768 @*/ 1769 PetscErrorCode TSForwardStep(TS ts) 1770 { 1771 PetscErrorCode ierr; 1772 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1773 if (!ts->ops->forwardstep) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide forward sensitivity analysis",((PetscObject)ts)->type_name); 1774 ierr = PetscLogEventBegin(TS_ForwardStep,ts,0,0,0);CHKERRQ(ierr); 1775 ierr = (*ts->ops->forwardstep)(ts);CHKERRQ(ierr); 1776 ierr = PetscLogEventEnd(TS_ForwardStep,ts,0,0,0);CHKERRQ(ierr); 1777 if (ts->reason < 0 && ts->errorifstepfailed) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_NOT_CONVERGED,"TSFowardStep has failed due to %s",TSConvergedReasons[ts->reason]); 1778 PetscFunctionReturn(0); 1779 } 1780 1781 /*@ 1782 TSForwardSetSensitivities - Sets the initial value of the trajectory sensitivities of solution w.r.t. the problem parameters and initial values. 
1783 1784 Logically Collective on TS 1785 1786 Input Parameters: 1787 + ts - the TS context obtained from TSCreate() 1788 . nump - number of parameters 1789 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1790 1791 Level: beginner 1792 1793 Notes: 1794 Forward sensitivity is also called 'trajectory sensitivity' in some fields such as power systems. 1795 This function turns on a flag to trigger TSSolve() to compute forward sensitivities automatically. 1796 You must call this function before TSSolve(). 1797 The entries in the sensitivity matrix must be correctly initialized with the values S = dy/dp|startingtime. 1798 1799 .seealso: TSForwardGetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1800 @*/ 1801 PetscErrorCode TSForwardSetSensitivities(TS ts,PetscInt nump,Mat Smat) 1802 { 1803 PetscErrorCode ierr; 1804 1805 PetscFunctionBegin; 1806 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1807 PetscValidHeaderSpecific(Smat,MAT_CLASSID,3); 1808 ts->forward_solve = PETSC_TRUE; 1809 if (nump == PETSC_DEFAULT) { 1810 ierr = MatGetSize(Smat,NULL,&ts->num_parameters);CHKERRQ(ierr); 1811 } else ts->num_parameters = nump; 1812 ierr = PetscObjectReference((PetscObject)Smat);CHKERRQ(ierr); 1813 ierr = MatDestroy(&ts->mat_sensip);CHKERRQ(ierr); 1814 ts->mat_sensip = Smat; 1815 PetscFunctionReturn(0); 1816 } 1817 1818 /*@ 1819 TSForwardGetSensitivities - Returns the trajectory sensitivities 1820 1821 Not Collective, but Vec returned is parallel if TS is parallel 1822 1823 Output Parameter: 1824 + ts - the TS context obtained from TSCreate() 1825 . 
nump - number of parameters 1826 - Smat - sensitivities with respect to the parameters, the number of entries in these vectors is the same as the number of parameters 1827 1828 Level: intermediate 1829 1830 .seealso: TSForwardSetSensitivities(), TSForwardSetIntegralGradients(), TSForwardGetIntegralGradients(), TSForwardStep() 1831 @*/ 1832 PetscErrorCode TSForwardGetSensitivities(TS ts,PetscInt *nump,Mat *Smat) 1833 { 1834 PetscFunctionBegin; 1835 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1836 if (nump) *nump = ts->num_parameters; 1837 if (Smat) *Smat = ts->mat_sensip; 1838 PetscFunctionReturn(0); 1839 } 1840 1841 /*@ 1842 TSForwardCostIntegral - Evaluate the cost integral in the forward run. 1843 1844 Collective on TS 1845 1846 Input Arguments: 1847 . ts - time stepping context 1848 1849 Level: advanced 1850 1851 Notes: 1852 This function cannot be called until TSStep() has been completed. 1853 1854 .seealso: TSSolve(), TSAdjointCostIntegral() 1855 @*/ 1856 PetscErrorCode TSForwardCostIntegral(TS ts) 1857 { 1858 PetscErrorCode ierr; 1859 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1860 if (!ts->ops->forwardintegral) SETERRQ1(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,"%s does not provide integral evaluation in the forward run",((PetscObject)ts)->type_name); 1861 ierr = (*ts->ops->forwardintegral)(ts);CHKERRQ(ierr); 1862 PetscFunctionReturn(0); 1863 } 1864 1865 /*@ 1866 TSForwardSetInitialSensitivities - Set initial values for tangent linear sensitivities 1867 1868 Collective on TS 1869 1870 Input Parameter 1871 + ts - the TS context obtained from TSCreate() 1872 - didp - parametric sensitivities of the initial condition 1873 1874 Level: intermediate 1875 1876 Notes: TSSolve() allows users to pass the initial solution directly to TS. But the tangent linear variables cannot be initialized in this way. This function is used to set initial values for tangent linear variables. 
1877 1878 .seealso: TSForwardSetSensitivities() 1879 @*/ 1880 PetscErrorCode TSForwardSetInitialSensitivities(TS ts,Mat didp) 1881 { 1882 PetscErrorCode ierr; 1883 1884 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1885 PetscValidHeaderSpecific(didp,MAT_CLASSID,2); 1886 if (!ts->mat_sensip) { 1887 ierr = TSForwardSetSensitivities(ts,PETSC_DEFAULT,didp);CHKERRQ(ierr); 1888 } 1889 PetscFunctionReturn(0); 1890 } 1891 1892 /*@ 1893 TSForwardGetStages - Get the number of stages and the tangent linear sensitivities at the intermediate stages 1894 1895 Input Parameter: 1896 . ts - the TS context obtained from TSCreate() 1897 1898 Output Parameters: 1899 + ns - number of stages 1900 - S - tangent linear sensitivities at the intermediate stages 1901 1902 Level: advanced 1903 1904 @*/ 1905 PetscErrorCode TSForwardGetStages(TS ts,PetscInt *ns,Mat **S) 1906 { 1907 PetscErrorCode ierr; 1908 1909 PetscFunctionBegin; 1910 PetscValidHeaderSpecific(ts, TS_CLASSID,1); 1911 1912 if (!ts->ops->getstages) *S=NULL; 1913 else { 1914 ierr = (*ts->ops->forwardgetstages)(ts,ns,S);CHKERRQ(ierr); 1915 } 1916 PetscFunctionReturn(0); 1917 } 1918 1919 /*@ 1920 TSCreateQuadratureTS - Create a sub-TS that evaluates integrals over time 1921 1922 Input Parameter: 1923 + ts - the TS context obtained from TSCreate() 1924 - fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1925 1926 Output Parameters: 1927 . 
quadts - the child TS context 1928 1929 Level: intermediate 1930 1931 .seealso: TSGetQuadratureTS() 1932 @*/ 1933 PetscErrorCode TSCreateQuadratureTS(TS ts,PetscBool fwd,TS *quadts) 1934 { 1935 char prefix[128]; 1936 PetscErrorCode ierr; 1937 1938 PetscFunctionBegin; 1939 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1940 PetscValidPointer(quadts,2); 1941 ierr = TSDestroy(&ts->quadraturets);CHKERRQ(ierr); 1942 ierr = TSCreate(PetscObjectComm((PetscObject)ts),&ts->quadraturets);CHKERRQ(ierr); 1943 ierr = PetscObjectIncrementTabLevel((PetscObject)ts->quadraturets,(PetscObject)ts,1);CHKERRQ(ierr); 1944 ierr = PetscLogObjectParent((PetscObject)ts,(PetscObject)ts->quadraturets);CHKERRQ(ierr); 1945 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%squad_",((PetscObject)ts)->prefix ? ((PetscObject)ts)->prefix : "");CHKERRQ(ierr); 1946 ierr = TSSetOptionsPrefix(ts->quadraturets,prefix);CHKERRQ(ierr); 1947 *quadts = ts->quadraturets; 1948 1949 if (ts->numcost) { 1950 ierr = VecCreateSeq(PETSC_COMM_SELF,ts->numcost,&(*quadts)->vec_sol);CHKERRQ(ierr); 1951 } else { 1952 ierr = VecCreateSeq(PETSC_COMM_SELF,1,&(*quadts)->vec_sol);CHKERRQ(ierr); 1953 } 1954 ts->costintegralfwd = fwd; 1955 PetscFunctionReturn(0); 1956 } 1957 1958 /*@ 1959 TSGetQuadratureTS - Return the sub-TS that evaluates integrals over time 1960 1961 Input Parameter: 1962 . 
ts - the TS context obtained from TSCreate() 1963 1964 Output Parameters: 1965 + fwd - flag indicating whether to evaluate cost integral in the forward run or the adjoint run 1966 - quadts - the child TS context 1967 1968 Level: intermediate 1969 1970 .seealso: TSCreateQuadratureTS() 1971 @*/ 1972 PetscErrorCode TSGetQuadratureTS(TS ts,PetscBool *fwd,TS *quadts) 1973 { 1974 PetscFunctionBegin; 1975 PetscValidHeaderSpecific(ts,TS_CLASSID,1); 1976 if (fwd) *fwd = ts->costintegralfwd; 1977 if (quadts) *quadts = ts->quadraturets; 1978 PetscFunctionReturn(0); 1979 } 1980 1981 /*@ 1982 TSComputeSNESJacobian - Compute the SNESJacobian 1983 1984 Input Parameters: 1985 + ts - the TS context obtained from TSCreate() 1986 - x - state vector 1987 1988 Output Parameters: 1989 + J - Jacobian matrix 1990 - Jpre - preconditioning matrix for J (may be same as J) 1991 1992 Level: developer 1993 1994 Notes: 1995 Using SNES to compute the Jacobian enables finite differencing when TS Jacobian is not available. 1996 @*/ 1997 PetscErrorCode TSComputeSNESJacobian(TS ts,Vec x,Mat J,Mat Jpre) 1998 { 1999 SNES snes = ts->snes; 2000 PetscErrorCode (*jac)(SNES,Vec,Mat,Mat,void*) = NULL; 2001 PetscErrorCode ierr; 2002 2003 PetscFunctionBegin; 2004 /* 2005 Unlike implicit methods, explicit methods do not have SNESMatFDColoring in the snes object 2006 because SNESSolve() has not been called yet; so querying SNESMatFDColoring does not work for 2007 explicit methods. Instead, we check the Jacobian compute function directly to determin if FD 2008 coloring is used. 
2009 */ 2010 ierr = SNESGetJacobian(snes,NULL,NULL,&jac,NULL);CHKERRQ(ierr); 2011 if (jac == SNESComputeJacobianDefaultColor) { 2012 Vec f; 2013 ierr = SNESSetSolution(snes,x);CHKERRQ(ierr); 2014 ierr = SNESGetFunction(snes,&f,NULL,NULL);CHKERRQ(ierr); 2015 /* Force MatFDColoringApply to evaluate the SNES residual function for the base vector */ 2016 ierr = SNESComputeFunction(snes,x,f);CHKERRQ(ierr); 2017 } 2018 ierr = SNESComputeJacobian(snes,x,J,Jpre);CHKERRQ(ierr); 2019 PetscFunctionReturn(0); 2020 } 2021