1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/ 2 3 /*@ 4 TaoSetInitialVector - Sets the initial guess for the solve 5 6 Logically collective on Tao 7 8 Input Parameters: 9 + tao - the Tao context 10 - x0 - the initial guess 11 12 Level: beginner 13 .seealso: TaoCreate(), TaoSolve() 14 @*/ 15 16 PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0) 17 { 18 PetscErrorCode ierr; 19 20 PetscFunctionBegin; 21 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 22 if (x0) { 23 PetscValidHeaderSpecific(x0,VEC_CLASSID,2); 24 PetscObjectReference((PetscObject)x0); 25 } 26 ierr = VecDestroy(&tao->solution);CHKERRQ(ierr); 27 tao->solution = x0; 28 PetscFunctionReturn(0); 29 } 30 31 PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1) 32 { 33 Vec g2,g3; 34 PetscBool complete_print = PETSC_FALSE,test = PETSC_FALSE; 35 PetscReal hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm; 36 PetscScalar dot; 37 MPI_Comm comm; 38 PetscViewer viewer,mviewer; 39 PetscViewerFormat format; 40 PetscInt tabs; 41 static PetscBool directionsprinted = PETSC_FALSE; 42 PetscErrorCode ierr; 43 44 PetscFunctionBegin; 45 ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr); 46 ierr = PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);CHKERRQ(ierr); 47 ierr = PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);CHKERRQ(ierr); 48 ierr = PetscOptionsEnd();CHKERRQ(ierr); 49 if (!test) PetscFunctionReturn(0); 50 51 ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr); 52 ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr); 53 ierr = PetscViewerASCIIGetTab(viewer, &tabs);CHKERRQ(ierr); 54 ierr = PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);CHKERRQ(ierr); 55 ierr = PetscViewerASCIIPrintf(viewer," ---------- Testing Gradient -------------\n");CHKERRQ(ierr); 56 if (!complete_print && !directionsprinted) 
{ 57 ierr = PetscViewerASCIIPrintf(viewer," Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");CHKERRQ(ierr); 58 ierr = PetscViewerASCIIPrintf(viewer," of hand-coded and finite difference gradient entries greater than <threshold>.\n");CHKERRQ(ierr); 59 } 60 if (!directionsprinted) { 61 ierr = PetscViewerASCIIPrintf(viewer," Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||_F/||G||_F is\n");CHKERRQ(ierr); 62 ierr = PetscViewerASCIIPrintf(viewer," O(1.e-8), the hand-coded Gradient is probably correct.\n");CHKERRQ(ierr); 63 directionsprinted = PETSC_TRUE; 64 } 65 if (complete_print) { 66 ierr = PetscViewerPushFormat(mviewer,format);CHKERRQ(ierr); 67 } 68 69 ierr = VecDuplicate(x,&g2);CHKERRQ(ierr); 70 ierr = VecDuplicate(x,&g3);CHKERRQ(ierr); 71 72 /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */ 73 ierr = TaoDefaultComputeGradient(tao,x,g2,NULL);CHKERRQ(ierr); 74 75 ierr = VecNorm(g2,NORM_2,&fdnorm);CHKERRQ(ierr); 76 ierr = VecNorm(g1,NORM_2,&hcnorm);CHKERRQ(ierr); 77 ierr = VecNorm(g2,NORM_INFINITY,&fdmax);CHKERRQ(ierr); 78 ierr = VecNorm(g1,NORM_INFINITY,&hcmax);CHKERRQ(ierr); 79 ierr = VecDot(g1,g2,&dot);CHKERRQ(ierr); 80 ierr = VecCopy(g1,g3);CHKERRQ(ierr); 81 ierr = VecAXPY(g3,-1.0,g2);CHKERRQ(ierr); 82 ierr = VecNorm(g3,NORM_2,&diffnorm);CHKERRQ(ierr); 83 ierr = VecNorm(g3,NORM_INFINITY,&diffmax);CHKERRQ(ierr); 84 ierr = PetscViewerASCIIPrintf(viewer," ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));CHKERRQ(ierr); 85 ierr = PetscViewerASCIIPrintf(viewer," 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);CHKERRQ(ierr); 86 ierr = PetscViewerASCIIPrintf(viewer," max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = 
%g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);CHKERRQ(ierr); 87 88 if (complete_print) { 89 ierr = PetscViewerASCIIPrintf(viewer," Hand-coded gradient ----------\n");CHKERRQ(ierr); 90 ierr = VecView(g1,mviewer);CHKERRQ(ierr); 91 ierr = PetscViewerASCIIPrintf(viewer," Finite difference gradient ----------\n");CHKERRQ(ierr); 92 ierr = VecView(g2,mviewer);CHKERRQ(ierr); 93 ierr = PetscViewerASCIIPrintf(viewer," Hand-coded minus finite-difference gradient ----------\n");CHKERRQ(ierr); 94 ierr = VecView(g3,mviewer);CHKERRQ(ierr); 95 } 96 ierr = VecDestroy(&g2);CHKERRQ(ierr); 97 ierr = VecDestroy(&g3);CHKERRQ(ierr); 98 99 if (complete_print) { 100 ierr = PetscViewerPopFormat(mviewer);CHKERRQ(ierr); 101 } 102 ierr = PetscViewerASCIISetTab(viewer,tabs);CHKERRQ(ierr); 103 PetscFunctionReturn(0); 104 } 105 106 /*@ 107 TaoComputeGradient - Computes the gradient of the objective function 108 109 Collective on Tao 110 111 Input Parameters: 112 + tao - the Tao context 113 - X - input vector 114 115 Output Parameter: 116 . G - gradient vector 117 118 Options Database Keys: 119 + -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors 120 - -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient 121 122 Notes: 123 TaoComputeGradient() is typically used within minimization implementations, 124 so most users would not generally call this routine themselves. 
125 126 Level: advanced 127 128 .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine() 129 @*/ 130 PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G) 131 { 132 PetscErrorCode ierr; 133 PetscReal dummy; 134 135 PetscFunctionBegin; 136 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 137 PetscValidHeaderSpecific(X,VEC_CLASSID,2); 138 PetscValidHeaderSpecific(G,VEC_CLASSID,2); 139 PetscCheckSameComm(tao,1,X,2); 140 PetscCheckSameComm(tao,1,G,3); 141 ierr = VecLockReadPush(X);CHKERRQ(ierr); 142 if (tao->ops->computegradient) { 143 ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr); 144 PetscStackPush("Tao user gradient evaluation routine"); 145 ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr); 146 PetscStackPop; 147 ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr); 148 tao->ngrads++; 149 } else if (tao->ops->computeobjectiveandgradient) { 150 ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr); 151 PetscStackPush("Tao user objective/gradient evaluation routine"); 152 ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr); 153 PetscStackPop; 154 ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr); 155 tao->nfuncgrads++; 156 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called"); 157 ierr = VecLockReadPop(X);CHKERRQ(ierr); 158 159 ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr); 160 PetscFunctionReturn(0); 161 } 162 163 /*@ 164 TaoComputeObjective - Computes the objective function value at a given point 165 166 Collective on Tao 167 168 Input Parameters: 169 + tao - the Tao context 170 - X - input vector 171 172 Output Parameter: 173 . f - Objective value at X 174 175 Notes: 176 TaoComputeObjective() is typically used within minimization implementations, 177 so most users would not generally call this routine themselves. 
178 179 Level: advanced 180 181 .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine() 182 @*/ 183 PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f) 184 { 185 PetscErrorCode ierr; 186 Vec temp; 187 188 PetscFunctionBegin; 189 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 190 PetscValidHeaderSpecific(X,VEC_CLASSID,2); 191 PetscCheckSameComm(tao,1,X,2); 192 ierr = VecLockReadPush(X);CHKERRQ(ierr); 193 if (tao->ops->computeobjective) { 194 ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr); 195 PetscStackPush("Tao user objective evaluation routine"); 196 ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr); 197 PetscStackPop; 198 ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr); 199 tao->nfuncs++; 200 } else if (tao->ops->computeobjectiveandgradient) { 201 ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr); 202 ierr = VecDuplicate(X,&temp);CHKERRQ(ierr); 203 ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr); 204 PetscStackPush("Tao user objective/gradient evaluation routine"); 205 ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr); 206 PetscStackPop; 207 ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr); 208 ierr = VecDestroy(&temp);CHKERRQ(ierr); 209 tao->nfuncgrads++; 210 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called"); 211 ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr); 212 ierr = VecLockReadPop(X);CHKERRQ(ierr); 213 PetscFunctionReturn(0); 214 } 215 216 /*@ 217 TaoComputeObjectiveAndGradient - Computes the objective function value at a given point 218 219 Collective on Tao 220 221 Input Parameters: 222 + tao - the Tao context 223 - X - input vector 224 225 Output Parameter: 226 + f - Objective value at X 227 - g - 
 Gradient vector at X

   Notes:
    TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
    so most users would not generally call this routine themselves.

   Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* X is read-only during the user callbacks */
  ierr = VecLockReadPush(X);CHKERRQ(ierr);
  if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* gradient will be obtained by finite differences, so only the objective
         needs to be evaluated by the user routine */
      ierr = TaoComputeObjective(tao,X,f);CHKERRQ(ierr);
      ierr = TaoDefaultComputeGradient(tao,X,G,NULL);CHKERRQ(ierr);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
      PetscStackPop;
    }
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined routine: evaluate objective and gradient separately */
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
    ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
  ierr = VecLockReadPop(X);CHKERRQ(ierr);

  /* optionally compare against a finite difference gradient (-tao_test_gradient) */
  ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
.  func - the objective function
-  ctx - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

   Calling sequence of func:
$      func (Tao tao, Vec x, PetscReal *f, void *ctx);

+  x - input vector
.  f - function value
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->user_objP = ctx;
  tao->ops->computeobjective = func;
  PetscFunctionReturn(0);
}

/*@C
   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
.  res - vector used to store the residual values (the Tao context keeps a reference to it)
.  func - the residual evaluation routine
-  ctx - [optional] user-defined context for private data for the function evaluation
         routine (may be NULL)

   Calling sequence of func:
$      func (Tao tao, Vec x, Vec f, void *ctx);

+  x - input vector
.
f - function value vector
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
@*/
PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(res,VEC_CLASSID,2);
  /* take the new reference before releasing the old one so res == tao->ls_res
     is handled correctly; VecDestroy() is a no-op on a NULL vector */
  ierr = PetscObjectReference((PetscObject)res);CHKERRQ(ierr);
  ierr = VecDestroy(&tao->ls_res);CHKERRQ(ierr);
  tao->ls_res               = res;
  tao->user_lsresP          = ctx;
  tao->ops->computeresidual = func;

  PetscFunctionReturn(0);
}

/*@
   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
.  sigma_v - vector of weights (diagonal terms only)
.  n - the number of weights (if using off-diagonal)
.  rows - index list of rows for sigma_w
.
cols - index list of columns for sigma_w
-  vals - array of weights

   Note: Either sigma_v or sigma_w (or both) should be NULL

   Level: intermediate

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
{
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (sigma_v) {
    PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
    /* reference before destroying the old vector so sigma_v == res_weights_v is safe */
    ierr = PetscObjectReference((PetscObject)sigma_v);CHKERRQ(ierr);
  }
  if (tao->res_weights_v) {
    ierr = VecDestroy(&tao->res_weights_v);CHKERRQ(ierr);
  }
  tao->res_weights_v = sigma_v;
  /* always free previously set off-diagonal weights before installing (or
     clearing) new ones; the old code leaked them when vals == NULL */
  if (tao->res_weights_n) {
    ierr = PetscFree(tao->res_weights_rows);CHKERRQ(ierr);
    ierr = PetscFree(tao->res_weights_cols);CHKERRQ(ierr);
    ierr = PetscFree(tao->res_weights_w);CHKERRQ(ierr);
  }
  if (vals) {
    ierr = PetscMalloc1(n,&tao->res_weights_rows);CHKERRQ(ierr);
    ierr = PetscMalloc1(n,&tao->res_weights_cols);CHKERRQ(ierr);
    ierr = PetscMalloc1(n,&tao->res_weights_w);CHKERRQ(ierr);
    tao->res_weights_n = n;
    for (i=0;i<n;i++) {
      tao->res_weights_rows[i] = rows[i];
      tao->res_weights_cols[i] = cols[i];
      tao->res_weights_w[i]    = vals[i];
    }
  } else {
    tao->res_weights_n    = 0;
    /* use NULL for pointers (was 0) and also reset res_weights_w, which was
       previously left dangling */
    tao->res_weights_rows = NULL;
    tao->res_weights_cols = NULL;
    tao->res_weights_w    = NULL;
  }
  PetscFunctionReturn(0);
}

/*@
   TaoComputeResidual - Computes a least-squares residual vector at a given point

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  X - input vector

   Output Parameter:
.  F - Objective vector at X

   Notes:
    TaoComputeResidual() is typically used within minimization implementations,
    so most users would not generally call this routine themselves.
425 426 Level: advanced 427 428 .seealso: TaoSetResidualRoutine() 429 @*/ 430 PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F) 431 { 432 PetscErrorCode ierr; 433 434 PetscFunctionBegin; 435 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 436 PetscValidHeaderSpecific(X,VEC_CLASSID,2); 437 PetscValidHeaderSpecific(F,VEC_CLASSID,3); 438 PetscCheckSameComm(tao,1,X,2); 439 PetscCheckSameComm(tao,1,F,3); 440 if (tao->ops->computeresidual) { 441 ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr); 442 PetscStackPush("Tao user least-squares residual evaluation routine"); 443 ierr = (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);CHKERRQ(ierr); 444 PetscStackPop; 445 ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr); 446 tao->nfuncs++; 447 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called"); 448 ierr = PetscInfo(tao,"TAO least-squares residual evaluation.\n");CHKERRQ(ierr); 449 PetscFunctionReturn(0); 450 } 451 452 /*@C 453 TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization 454 455 Logically collective on Tao 456 457 Input Parameter: 458 + tao - the Tao context 459 . func - the gradient function 460 - ctx - [optional] user-defined context for private data for the gradient evaluation 461 routine (may be NULL) 462 463 Calling sequence of func: 464 $ func (Tao tao, Vec x, Vec g, void *ctx); 465 466 + x - input vector 467 . 
g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->user_gradP = ctx;
  tao->ops->computegradient = func;
  PetscFunctionReturn(0);
}

/*@C
   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
.  func - the combined objective function and gradient evaluation routine
-  ctx - [optional] user-defined context for private data for the evaluation
         routine (may be NULL)

   Calling sequence of func:
$      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+  x - input vector
.  f - objective value (output)
.  g - gradient value (output)
-  ctx - [optional] user-defined function context

   Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
@*/
PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->user_objgradP = ctx;
  tao->ops->computeobjectiveandgradient = func;
  PetscFunctionReturn(0);
}

/*@
   TaoIsObjectiveDefined - Checks to see if the user has
   declared an objective-only routine.
   Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if objective function routine is set by user,
         PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (tao->ops->computeobjective == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   TaoIsGradientDefined - Checks to see if the user has
   declared a gradient routine. Useful for determining when
   it is appropriate to call TaoComputeGradient() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
@*/
PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (tao->ops->computegradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
   declared a joint objective/gradient routine.
   Useful for determining when
   it is appropriate to call TaoComputeObjective() or
   TaoComputeObjectiveAndGradient()

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  flg - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise

   Level: developer

.seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
@*/
PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (tao->ops->computeobjectiveandgradient == 0) *flg = PETSC_FALSE;
  else *flg = PETSC_TRUE;
  PetscFunctionReturn(0);
}