1 #define TAO_DLL 2 3 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/ 4 5 PetscBool TaoRegisterAllCalled = PETSC_FALSE; 6 PetscFunctionList TaoList = NULL; 7 8 PetscClassId TAO_CLASSID; 9 PetscLogEvent Tao_Solve, Tao_ObjectiveEval, Tao_GradientEval, Tao_ObjGradientEval, Tao_HessianEval, Tao_ConstraintsEval, Tao_JacobianEval; 10 11 const char *TaoSubSetTypes[] = { "subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0}; 12 13 #undef __FUNCT__ 14 #define __FUNCT__ "TaoCreate" 15 /*@ 16 TaoCreate - Creates a TAO solver 17 18 Collective on MPI_Comm 19 20 Input Parameter: 21 . comm - MPI communicator 22 23 Output Parameter: 24 . newtao - the new Tao context 25 26 Available methods include: 27 + nls - Newton's method with line search for unconstrained minimization 28 . ntr - Newton's method with trust region for unconstrained minimization 29 . ntl - Newton's method with trust region, line search for unconstrained minimization 30 . lmvm - Limited memory variable metric method for unconstrained minimization 31 . cg - Nonlinear conjugate gradient method for unconstrained minimization 32 . nm - Nelder-Mead algorithm for derivate-free unconstrained minimization 33 . tron - Newton Trust Region method for bound constrained minimization 34 . gpcg - Newton Trust Region method for quadratic bound constrained minimization 35 . blmvm - Limited memory variable metric method for bound constrained minimization 36 . lcl - Linearly constrained Lagrangian method for pde-constrained minimization 37 - pounders - Model-based algorithm for nonlinear least squares 38 39 Options Database Keys: 40 . 
-tao_type - select which method TAO should use 41 42 Level: beginner 43 44 .seealso: TaoSolve(), TaoDestroy() 45 @*/ 46 PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao) 47 { 48 PetscErrorCode ierr; 49 Tao tao; 50 51 PetscFunctionBegin; 52 PetscValidPointer(newtao,2); 53 *newtao = NULL; 54 55 ierr = TaoInitializePackage();CHKERRQ(ierr); 56 ierr = TaoLineSearchInitializePackage();CHKERRQ(ierr); 57 58 ierr = PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);CHKERRQ(ierr); 59 tao->ops->computeobjective=0; 60 tao->ops->computeobjectiveandgradient=0; 61 tao->ops->computegradient=0; 62 tao->ops->computehessian=0; 63 tao->ops->computeseparableobjective=0; 64 tao->ops->computeconstraints=0; 65 tao->ops->computejacobian=0; 66 tao->ops->computejacobianequality=0; 67 tao->ops->computejacobianinequality=0; 68 tao->ops->computeequalityconstraints=0; 69 tao->ops->computeinequalityconstraints=0; 70 tao->ops->convergencetest=TaoDefaultConvergenceTest; 71 tao->ops->convergencedestroy=0; 72 tao->ops->computedual=0; 73 tao->ops->setup=0; 74 tao->ops->solve=0; 75 tao->ops->view=0; 76 tao->ops->setfromoptions=0; 77 tao->ops->destroy=0; 78 79 tao->solution=NULL; 80 tao->gradient=NULL; 81 tao->sep_objective = NULL; 82 tao->constraints=NULL; 83 tao->constraints_equality=NULL; 84 tao->constraints_inequality=NULL; 85 tao->sep_weights_v=NULL; 86 tao->sep_weights_w=NULL; 87 tao->stepdirection=NULL; 88 tao->niter=0; 89 tao->ntotalits=0; 90 tao->XL = NULL; 91 tao->XU = NULL; 92 tao->IL = NULL; 93 tao->IU = NULL; 94 tao->DI = NULL; 95 tao->DE = NULL; 96 tao->gradient_norm = NULL; 97 tao->gradient_norm_tmp = NULL; 98 tao->hessian = NULL; 99 tao->hessian_pre = NULL; 100 tao->jacobian = NULL; 101 tao->jacobian_pre = NULL; 102 tao->jacobian_state = NULL; 103 tao->jacobian_state_pre = NULL; 104 tao->jacobian_state_inv = NULL; 105 tao->jacobian_design = NULL; 106 tao->jacobian_design_pre = NULL; 107 tao->jacobian_equality = NULL; 108 
tao->jacobian_equality_pre = NULL; 109 tao->jacobian_inequality = NULL; 110 tao->jacobian_inequality_pre = NULL; 111 tao->state_is = NULL; 112 tao->design_is = NULL; 113 114 tao->max_it = 10000; 115 tao->max_funcs = 10000; 116 #if defined(PETSC_USE_REAL_SINGLE) 117 tao->gatol = 1e-5; 118 tao->grtol = 1e-5; 119 #else 120 tao->gatol = 1e-8; 121 tao->grtol = 1e-8; 122 #endif 123 tao->crtol = 0.0; 124 tao->catol = 0.0; 125 tao->steptol = 0.0; 126 tao->gttol = 0.0; 127 tao->trust0 = PETSC_INFINITY; 128 tao->fmin = PETSC_NINFINITY; 129 tao->hist_malloc = PETSC_FALSE; 130 tao->hist_reset = PETSC_TRUE; 131 tao->hist_max = 0; 132 tao->hist_len = 0; 133 tao->hist_obj = NULL; 134 tao->hist_resid = NULL; 135 tao->hist_cnorm = NULL; 136 tao->hist_lits = NULL; 137 138 tao->numbermonitors=0; 139 tao->viewsolution=PETSC_FALSE; 140 tao->viewhessian=PETSC_FALSE; 141 tao->viewgradient=PETSC_FALSE; 142 tao->viewjacobian=PETSC_FALSE; 143 tao->viewconstraints = PETSC_FALSE; 144 145 /* These flags prevents algorithms from overriding user options */ 146 tao->max_it_changed =PETSC_FALSE; 147 tao->max_funcs_changed=PETSC_FALSE; 148 tao->gatol_changed =PETSC_FALSE; 149 tao->grtol_changed =PETSC_FALSE; 150 tao->gttol_changed =PETSC_FALSE; 151 tao->steptol_changed =PETSC_FALSE; 152 tao->trust0_changed =PETSC_FALSE; 153 tao->fmin_changed =PETSC_FALSE; 154 tao->catol_changed =PETSC_FALSE; 155 tao->crtol_changed =PETSC_FALSE; 156 ierr = TaoResetStatistics(tao);CHKERRQ(ierr); 157 *newtao = tao; 158 PetscFunctionReturn(0); 159 } 160 161 #undef __FUNCT__ 162 #define __FUNCT__ "TaoSolve" 163 /*@ 164 TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u 165 166 Collective on Tao 167 168 Input Parameters: 169 . tao - the Tao context 170 171 Notes: 172 The user must set up the Tao with calls to TaoSetInitialVector(), 173 TaoSetObjectiveRoutine(), 174 TaoSetGradientRoutine(), and (if using 2nd order method) TaoSetHessianRoutine(). 
175 176 You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or 177 why it failed. 178 179 Level: beginner 180 181 .seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason() 182 @*/ 183 PetscErrorCode TaoSolve(Tao tao) 184 { 185 PetscErrorCode ierr; 186 static PetscBool set = PETSC_FALSE; 187 188 PetscFunctionBegin; 189 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 190 ierr = PetscCitationsRegister("@TechReport{tao-user-ref,\n" 191 "title = {Toolkit for Advanced Optimization (TAO) Users Manual},\n" 192 "author = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n" 193 "Institution = {Argonne National Laboratory},\n" 194 "Year = 2014,\n" 195 "Number = {ANL/MCS-TM-322 - Revision 3.5},\n" 196 "url = {http://www.mcs.anl.gov/tao}\n}\n",&set);CHKERRQ(ierr); 197 198 ierr = TaoSetUp(tao);CHKERRQ(ierr); 199 ierr = TaoResetStatistics(tao);CHKERRQ(ierr); 200 if (tao->linesearch) { 201 ierr = TaoLineSearchReset(tao->linesearch);CHKERRQ(ierr); 202 } 203 204 ierr = PetscLogEventBegin(Tao_Solve,tao,0,0,0);CHKERRQ(ierr); 205 if (tao->ops->solve){ ierr = (*tao->ops->solve)(tao);CHKERRQ(ierr); } 206 ierr = PetscLogEventEnd(Tao_Solve,tao,0,0,0);CHKERRQ(ierr); 207 208 tao->ntotalits += tao->niter; 209 ierr = TaoViewFromOptions(tao,NULL,"-tao_view");CHKERRQ(ierr); 210 211 if (tao->printreason) { 212 if (tao->reason > 0) { 213 ierr = PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);CHKERRQ(ierr); 214 } else { 215 ierr = PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iteration %D\n",TaoConvergedReasons[tao->reason],tao->niter);CHKERRQ(ierr); 216 } 217 } 218 PetscFunctionReturn(0); 219 } 220 221 #undef __FUNCT__ 222 #define __FUNCT__ "TaoSetUp" 223 /*@ 224 TaoSetUp - Sets up the internal data 
structures for the later use 225 of a Tao solver 226 227 Collective on tao 228 229 Input Parameters: 230 . tao - the TAO context 231 232 Notes: 233 The user will not need to explicitly call TaoSetUp(), as it will 234 automatically be called in TaoSolve(). However, if the user 235 desires to call it explicitly, it should come after TaoCreate() 236 and any TaoSetSomething() routines, but before TaoSolve(). 237 238 Level: advanced 239 240 .seealso: TaoCreate(), TaoSolve() 241 @*/ 242 PetscErrorCode TaoSetUp(Tao tao) 243 { 244 PetscErrorCode ierr; 245 246 PetscFunctionBegin; 247 PetscValidHeaderSpecific(tao, TAO_CLASSID,1); 248 if (tao->setupcalled) PetscFunctionReturn(0); 249 250 if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector"); 251 if (tao->ops->setup) { 252 ierr = (*tao->ops->setup)(tao);CHKERRQ(ierr); 253 } 254 tao->setupcalled = PETSC_TRUE; 255 PetscFunctionReturn(0); 256 } 257 258 #undef __FUNCT__ 259 #define __FUNCT__ "TaoDestroy" 260 /*@ 261 TaoDestroy - Destroys the TAO context that was created with 262 TaoCreate() 263 264 Collective on Tao 265 266 Input Parameter: 267 . 
tao - the Tao context 268 269 Level: beginner 270 271 .seealso: TaoCreate(), TaoSolve() 272 @*/ 273 PetscErrorCode TaoDestroy(Tao *tao) 274 { 275 PetscErrorCode ierr; 276 277 PetscFunctionBegin; 278 if (!*tao) PetscFunctionReturn(0); 279 PetscValidHeaderSpecific(*tao,TAO_CLASSID,1); 280 if (--((PetscObject)*tao)->refct > 0) {*tao=0;PetscFunctionReturn(0);} 281 282 if ((*tao)->ops->destroy) { 283 ierr = (*((*tao))->ops->destroy)(*tao);CHKERRQ(ierr); 284 } 285 ierr = KSPDestroy(&(*tao)->ksp);CHKERRQ(ierr); 286 ierr = TaoLineSearchDestroy(&(*tao)->linesearch);CHKERRQ(ierr); 287 288 if ((*tao)->ops->convergencedestroy) { 289 ierr = (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);CHKERRQ(ierr); 290 if ((*tao)->jacobian_state_inv) { 291 ierr = MatDestroy(&(*tao)->jacobian_state_inv);CHKERRQ(ierr); 292 } 293 } 294 ierr = VecDestroy(&(*tao)->solution);CHKERRQ(ierr); 295 ierr = VecDestroy(&(*tao)->gradient);CHKERRQ(ierr); 296 297 if ((*tao)->gradient_norm) { 298 ierr = PetscObjectDereference((PetscObject)(*tao)->gradient_norm);CHKERRQ(ierr); 299 ierr = VecDestroy(&(*tao)->gradient_norm_tmp);CHKERRQ(ierr); 300 } 301 302 ierr = VecDestroy(&(*tao)->XL);CHKERRQ(ierr); 303 ierr = VecDestroy(&(*tao)->XU);CHKERRQ(ierr); 304 ierr = VecDestroy(&(*tao)->IL);CHKERRQ(ierr); 305 ierr = VecDestroy(&(*tao)->IU);CHKERRQ(ierr); 306 ierr = VecDestroy(&(*tao)->DE);CHKERRQ(ierr); 307 ierr = VecDestroy(&(*tao)->DI);CHKERRQ(ierr); 308 ierr = VecDestroy(&(*tao)->constraints_equality);CHKERRQ(ierr); 309 ierr = VecDestroy(&(*tao)->constraints_inequality);CHKERRQ(ierr); 310 ierr = VecDestroy(&(*tao)->stepdirection);CHKERRQ(ierr); 311 ierr = MatDestroy(&(*tao)->hessian_pre);CHKERRQ(ierr); 312 ierr = MatDestroy(&(*tao)->hessian);CHKERRQ(ierr); 313 ierr = MatDestroy(&(*tao)->jacobian_pre);CHKERRQ(ierr); 314 ierr = MatDestroy(&(*tao)->jacobian);CHKERRQ(ierr); 315 ierr = MatDestroy(&(*tao)->jacobian_state_pre);CHKERRQ(ierr); 316 ierr = MatDestroy(&(*tao)->jacobian_state);CHKERRQ(ierr); 317 ierr = 
MatDestroy(&(*tao)->jacobian_state_inv);CHKERRQ(ierr); 318 ierr = MatDestroy(&(*tao)->jacobian_design);CHKERRQ(ierr); 319 ierr = MatDestroy(&(*tao)->jacobian_equality);CHKERRQ(ierr); 320 ierr = MatDestroy(&(*tao)->jacobian_equality_pre);CHKERRQ(ierr); 321 ierr = MatDestroy(&(*tao)->jacobian_inequality);CHKERRQ(ierr); 322 ierr = MatDestroy(&(*tao)->jacobian_inequality_pre);CHKERRQ(ierr); 323 ierr = ISDestroy(&(*tao)->state_is);CHKERRQ(ierr); 324 ierr = ISDestroy(&(*tao)->design_is);CHKERRQ(ierr); 325 ierr = VecDestroy(&(*tao)->sep_weights_v);CHKERRQ(ierr); 326 ierr = TaoCancelMonitors(*tao);CHKERRQ(ierr); 327 if ((*tao)->hist_malloc) { 328 ierr = PetscFree((*tao)->hist_obj);CHKERRQ(ierr); 329 ierr = PetscFree((*tao)->hist_resid);CHKERRQ(ierr); 330 ierr = PetscFree((*tao)->hist_cnorm);CHKERRQ(ierr); 331 ierr = PetscFree((*tao)->hist_lits);CHKERRQ(ierr); 332 } 333 if ((*tao)->sep_weights_n) { 334 ierr = PetscFree((*tao)->sep_weights_rows);CHKERRQ(ierr); 335 ierr = PetscFree((*tao)->sep_weights_cols);CHKERRQ(ierr); 336 ierr = PetscFree((*tao)->sep_weights_w);CHKERRQ(ierr); 337 } 338 ierr = PetscHeaderDestroy(tao);CHKERRQ(ierr); 339 PetscFunctionReturn(0); 340 } 341 342 #undef __FUNCT__ 343 #define __FUNCT__ "TaoSetFromOptions" 344 /*@ 345 TaoSetFromOptions - Sets various Tao parameters from user 346 options. 347 348 Collective on Tao 349 350 Input Paremeter: 351 . tao - the Tao solver context 352 353 options Database Keys: 354 + -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.) 355 . -tao_gatol <gatol> - absolute error tolerance for ||gradient|| 356 . -tao_grtol <grtol> - relative error tolerance for ||gradient|| 357 . -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient 358 . -tao_max_it <max> - sets maximum number of iterations 359 . -tao_max_funcs <max> - sets maximum number of function evaluations 360 . -tao_fmin <fmin> - stop if function value reaches fmin 361 . 
-tao_steptol <tol> - stop if trust region radius less than <tol> 362 . -tao_trust0 <t> - initial trust region radius 363 . -tao_monitor - prints function value and residual at each iteration 364 . -tao_smonitor - same as tao_monitor, but truncates very small values 365 . -tao_cmonitor - prints function value, residual, and constraint norm at each iteration 366 . -tao_view_solution - prints solution vector at each iteration 367 . -tao_view_separableobjective - prints separable objective vector at each iteration 368 . -tao_view_step - prints step direction vector at each iteration 369 . -tao_view_gradient - prints gradient vector at each iteration 370 . -tao_draw_solution - graphically view solution vector at each iteration 371 . -tao_draw_step - graphically view step vector at each iteration 372 . -tao_draw_gradient - graphically view gradient at each iteration 373 . -tao_fd_gradient - use gradient computed with finite differences 374 . -tao_cancelmonitors - cancels all monitors (except those set with command line) 375 . -tao_view - prints information about the Tao after solving 376 - -tao_converged_reason - prints the reason TAO stopped iterating 377 378 Notes: 379 To see all options, run your program with the -help option or consult the 380 user's manual. 
Should be called after TaoCreate() but before TaoSolve() 381 382 Level: beginner 383 @*/ 384 PetscErrorCode TaoSetFromOptions(Tao tao) 385 { 386 PetscErrorCode ierr; 387 const TaoType default_type = TAOLMVM; 388 char type[256], monfilename[PETSC_MAX_PATH_LEN]; 389 PetscViewer monviewer; 390 PetscBool flg; 391 MPI_Comm comm; 392 393 PetscFunctionBegin; 394 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 395 ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr); 396 397 /* So no warnings are given about unused options */ 398 ierr = PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);CHKERRQ(ierr); 399 400 ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr); 401 { 402 ierr = TaoRegisterAll();CHKERRQ(ierr); 403 if (((PetscObject)tao)->type_name) { 404 default_type = ((PetscObject)tao)->type_name; 405 } 406 /* Check for type from options */ 407 ierr = PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);CHKERRQ(ierr); 408 if (flg) { 409 ierr = TaoSetType(tao,type);CHKERRQ(ierr); 410 } else if (!((PetscObject)tao)->type_name) { 411 ierr = TaoSetType(tao,default_type);CHKERRQ(ierr); 412 } 413 414 ierr = PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);CHKERRQ(ierr); 415 if (flg) tao->catol_changed=PETSC_TRUE; 416 ierr = PetscOptionsReal("-tao_crtol","Stop if relative contraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);CHKERRQ(ierr); 417 if (flg) tao->crtol_changed=PETSC_TRUE; 418 ierr = PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);CHKERRQ(ierr); 419 if (flg) tao->gatol_changed=PETSC_TRUE; 420 ierr = PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);CHKERRQ(ierr); 421 if (flg) 
tao->grtol_changed=PETSC_TRUE; 422 ierr = PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);CHKERRQ(ierr); 423 if (flg) tao->gttol_changed=PETSC_TRUE; 424 ierr = PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);CHKERRQ(ierr); 425 if (flg) tao->max_it_changed=PETSC_TRUE; 426 ierr = PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);CHKERRQ(ierr); 427 if (flg) tao->max_funcs_changed=PETSC_TRUE; 428 ierr = PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);CHKERRQ(ierr); 429 if (flg) tao->fmin_changed=PETSC_TRUE; 430 ierr = PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);CHKERRQ(ierr); 431 if (flg) tao->steptol_changed=PETSC_TRUE; 432 ierr = PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);CHKERRQ(ierr); 433 if (flg) tao->trust0_changed=PETSC_TRUE; 434 ierr = PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 435 if (flg) { 436 ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 437 ierr = TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 438 } 439 440 ierr = PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);CHKERRQ(ierr); 441 ierr = PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 442 if (flg) { 443 ierr = 
PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 444 ierr = TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 445 } 446 447 ierr = PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 448 if (flg) { 449 ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 450 ierr = TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 451 } 452 453 ierr = PetscOptionsString("-tao_view_separableobjective","view separable objective vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 454 if (flg) { 455 ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 456 ierr = TaoSetMonitor(tao,TaoSeparableObjectiveMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 457 } 458 459 ierr = PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 460 if (flg) { 461 ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 462 ierr = TaoSetMonitor(tao,TaoDefaultMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 463 } 464 465 ierr = PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 466 if (flg) { 467 ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 468 ierr = TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 469 } 470 471 ierr = PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); 472 if (flg) { 473 ierr 
= PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr); 474 ierr = TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr); 475 } 476 477 478 flg = PETSC_FALSE; 479 ierr = PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);CHKERRQ(ierr); 480 if (flg) {ierr = TaoCancelMonitors(tao);CHKERRQ(ierr);} 481 482 flg = PETSC_FALSE; 483 ierr = PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr); 484 if (flg) { 485 ierr = TaoSetMonitor(tao,TaoDrawSolutionMonitor,NULL,NULL);CHKERRQ(ierr); 486 } 487 488 flg = PETSC_FALSE; 489 ierr = PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr); 490 if (flg) { 491 ierr = TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);CHKERRQ(ierr); 492 } 493 494 flg = PETSC_FALSE; 495 ierr = PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr); 496 if (flg) { 497 ierr = TaoSetMonitor(tao,TaoDrawGradientMonitor,NULL,NULL);CHKERRQ(ierr); 498 } 499 flg = PETSC_FALSE; 500 ierr = PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);CHKERRQ(ierr); 501 if (flg) { 502 ierr = TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);CHKERRQ(ierr); 503 } 504 ierr = PetscOptionsEnum("-tao_subset_type","subset type", "", TaoSubSetTypes,(PetscEnum)tao->subset_type, (PetscEnum*)&tao->subset_type, 0);CHKERRQ(ierr); 505 506 if (tao->ops->setfromoptions) { 507 ierr = (*tao->ops->setfromoptions)(PetscOptionsObject,tao);CHKERRQ(ierr); 508 } 509 } 510 ierr = PetscOptionsEnd();CHKERRQ(ierr); 511 PetscFunctionReturn(0); 512 } 513 514 #undef __FUNCT__ 515 #define __FUNCT__ "TaoView" 516 /*@C 517 TaoView - Prints information about the Tao 518 519 Collective on 
Tao 520 521 InputParameters: 522 + tao - the Tao context 523 - viewer - visualization context 524 525 Options Database Key: 526 . -tao_view - Calls TaoView() at the end of TaoSolve() 527 528 Notes: 529 The available visualization contexts include 530 + PETSC_VIEWER_STDOUT_SELF - standard output (default) 531 - PETSC_VIEWER_STDOUT_WORLD - synchronized standard 532 output where only the first processor opens 533 the file. All other processors send their 534 data to the first processor to print. 535 536 Level: beginner 537 538 .seealso: PetscViewerASCIIOpen() 539 @*/ 540 PetscErrorCode TaoView(Tao tao, PetscViewer viewer) 541 { 542 PetscErrorCode ierr; 543 PetscBool isascii,isstring; 544 const TaoType type; 545 546 PetscFunctionBegin; 547 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 548 if (!viewer) { 549 ierr = PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);CHKERRQ(ierr); 550 } 551 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 552 PetscCheckSameComm(tao,1,viewer,2); 553 554 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 555 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 556 if (isascii) { 557 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);CHKERRQ(ierr); 558 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 559 560 if (tao->ops->view) { 561 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 562 ierr = (*tao->ops->view)(tao,viewer);CHKERRQ(ierr); 563 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 564 } 565 if (tao->linesearch) { 566 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)(tao->linesearch),viewer);CHKERRQ(ierr); 567 } 568 if (tao->ksp) { 569 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)(tao->ksp),viewer);CHKERRQ(ierr); 570 ierr = PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);CHKERRQ(ierr); 571 } 572 if (tao->XL || tao->XU) { 573 ierr = 
PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);CHKERRQ(ierr); 574 } 575 576 ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);CHKERRQ(ierr); 577 ierr=PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);CHKERRQ(ierr); 578 ierr=PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);CHKERRQ(ierr); 579 580 ierr = PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);CHKERRQ(ierr); 581 582 if (tao->cnorm>0 || tao->catol>0 || tao->crtol>0){ 583 ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances:");CHKERRQ(ierr); 584 ierr=PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);CHKERRQ(ierr); 585 ierr=PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);CHKERRQ(ierr); 586 ierr = PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);CHKERRQ(ierr); 587 } 588 589 if (tao->trust < tao->steptol){ 590 ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);CHKERRQ(ierr); 591 ierr=PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);CHKERRQ(ierr); 592 } 593 594 if (tao->fmin>-1.e25){ 595 ierr=PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);CHKERRQ(ierr); 596 } 597 ierr = PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);CHKERRQ(ierr); 598 599 ierr = PetscViewerASCIIPrintf(viewer,"total number of iterations=%D, ",tao->niter);CHKERRQ(ierr); 600 ierr = PetscViewerASCIIPrintf(viewer," (max: %D)\n",tao->max_it);CHKERRQ(ierr); 601 602 if (tao->nfuncs>0){ 603 ierr = PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);CHKERRQ(ierr); 604 ierr = PetscViewerASCIIPrintf(viewer," max: %D\n",tao->max_funcs);CHKERRQ(ierr); 605 } 606 if (tao->ngrads>0){ 607 ierr = PetscViewerASCIIPrintf(viewer,"total number of gradient 
evaluations=%D,",tao->ngrads);CHKERRQ(ierr); 608 ierr = PetscViewerASCIIPrintf(viewer," max: %D\n",tao->max_funcs);CHKERRQ(ierr); 609 } 610 if (tao->nfuncgrads>0){ 611 ierr = PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);CHKERRQ(ierr); 612 ierr = PetscViewerASCIIPrintf(viewer," (max: %D)\n",tao->max_funcs);CHKERRQ(ierr); 613 } 614 if (tao->nhess>0){ 615 ierr = PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);CHKERRQ(ierr); 616 } 617 /* if (tao->linear_its>0){ 618 ierr = PetscViewerASCIIPrintf(viewer," total Krylov method iterations=%D\n",tao->linear_its);CHKERRQ(ierr); 619 }*/ 620 if (tao->nconstraints>0){ 621 ierr = PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);CHKERRQ(ierr); 622 } 623 if (tao->njac>0){ 624 ierr = PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);CHKERRQ(ierr); 625 } 626 627 if (tao->reason>0){ 628 ierr = PetscViewerASCIIPrintf(viewer, "Solution converged: ");CHKERRQ(ierr); 629 switch (tao->reason) { 630 case TAO_CONVERGED_GATOL: 631 ierr = PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");CHKERRQ(ierr); 632 break; 633 case TAO_CONVERGED_GRTOL: 634 ierr = PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");CHKERRQ(ierr); 635 break; 636 case TAO_CONVERGED_GTTOL: 637 ierr = PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");CHKERRQ(ierr); 638 break; 639 case TAO_CONVERGED_STEPTOL: 640 ierr = PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");CHKERRQ(ierr); 641 break; 642 case TAO_CONVERGED_MINF: 643 ierr = PetscViewerASCIIPrintf(viewer," Minf -- f < fmin\n");CHKERRQ(ierr); 644 break; 645 case TAO_CONVERGED_USER: 646 ierr = PetscViewerASCIIPrintf(viewer," User Terminated\n");CHKERRQ(ierr); 647 break; 648 default: 649 ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr); 650 break; 651 } 652 653 } else { 654 ierr = 
PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);CHKERRQ(ierr); 655 switch (tao->reason) { 656 case TAO_DIVERGED_MAXITS: 657 ierr = PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");CHKERRQ(ierr); 658 break; 659 case TAO_DIVERGED_NAN: 660 ierr = PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");CHKERRQ(ierr); 661 break; 662 case TAO_DIVERGED_MAXFCN: 663 ierr = PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");CHKERRQ(ierr); 664 break; 665 case TAO_DIVERGED_LS_FAILURE: 666 ierr = PetscViewerASCIIPrintf(viewer," Line Search Failure\n");CHKERRQ(ierr); 667 break; 668 case TAO_DIVERGED_TR_REDUCTION: 669 ierr = PetscViewerASCIIPrintf(viewer," Trust Region too small\n");CHKERRQ(ierr); 670 break; 671 case TAO_DIVERGED_USER: 672 ierr = PetscViewerASCIIPrintf(viewer," User Terminated\n");CHKERRQ(ierr); 673 break; 674 default: 675 ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr); 676 break; 677 } 678 } 679 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 680 } else if (isstring) { 681 ierr = TaoGetType(tao,&type);CHKERRQ(ierr); 682 ierr = PetscViewerStringSPrintf(viewer," %-3.3s",type);CHKERRQ(ierr); 683 } 684 PetscFunctionReturn(0); 685 } 686 687 #undef __FUNCT__ 688 #define __FUNCT__ "TaoSetTolerances" 689 /*@ 690 TaoSetTolerances - Sets parameters used in TAO convergence tests 691 692 Logically collective on Tao 693 694 Input Parameters: 695 + tao - the Tao context 696 . gatol - stop if norm of gradient is less than this 697 . grtol - stop if relative norm of gradient is less than this 698 - gttol - stop if norm of gradient is reduced by this factor 699 700 Options Database Keys: 701 + -tao_gatol <gatol> - Sets gatol 702 . -tao_grtol <grtol> - Sets grtol 703 - -tao_gttol <gttol> - Sets gttol 704 705 Stopping Criteria: 706 $ ||g(X)|| <= gatol 707 $ ||g(X)|| / |f(X)| <= grtol 708 $ ||g(X)|| / ||g(X0)|| <= gttol 709 710 Notes: 711 Use PETSC_DEFAULT to leave one or more tolerances unchanged. 
   Level: beginner

.seealso: TaoGetTolerances()

@*/
PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);

  /* PETSC_DEFAULT leaves a tolerance unchanged; negative values are rejected with an info message */
  if (gatol != PETSC_DEFAULT) {
    if (gatol<0) {
      ierr = PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->gatol = PetscMax(0,gatol);
      tao->gatol_changed=PETSC_TRUE;
    }
  }

  if (grtol != PETSC_DEFAULT) {
    if (grtol<0) {
      ierr = PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->grtol = PetscMax(0,grtol);
      tao->grtol_changed=PETSC_TRUE;
    }
  }

  if (gttol != PETSC_DEFAULT) {
    if (gttol<0) {
      ierr = PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->gttol = PetscMax(0,gttol);
      tao->gttol_changed=PETSC_TRUE;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetConstraintTolerances"
/*@
   TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO convergence tests

   Logically collective on Tao

   Input Parameters:
+  tao - the Tao context
.  catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to apply
-  crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to apply

   Options Database Keys:
+  -tao_catol <catol> - Sets catol
-  -tao_crtol <crtol> - Sets crtol

   Notes:
   Use PETSC_DEFAULT to leave any tolerance unchanged.

   Level: intermediate

.seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

@*/
PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);

  if (catol != PETSC_DEFAULT) {
    if (catol<0) {
      ierr = PetscInfo(tao,"Tried to set negative catol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->catol = PetscMax(0,catol);
      tao->catol_changed=PETSC_TRUE;
    }
  }

  if (crtol != PETSC_DEFAULT) {
    if (crtol<0) {
      ierr = PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->crtol = PetscMax(0,crtol);
      tao->crtol_changed=PETSC_TRUE;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetConstraintTolerances"
/*@
   TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO convergence tests

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameters:
+  catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to apply
-  crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol and gttol convergence criteria to apply

   Level: intermediate

.seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

@*/
PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (catol) *catol = tao->catol;
  if (crtol) *crtol = tao->crtol;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetFunctionLowerBound"
/*@
   TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
   When an approximate solution with an objective value below this number
   has been found, the solver will terminate.

   Logically Collective on Tao

   Input Parameters:
+  tao - the Tao solver context
-  fmin - the tolerance

   Options Database Keys:
.  -tao_fmin <fmin> - sets the minimum function value

   Level: intermediate

.seealso: TaoSetTolerances()
@*/
PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->fmin = fmin;
  tao->fmin_changed=PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetFunctionLowerBound"
/*@
   TaoGetFunctionLowerBound - Gets the bound on the solution objective value.
   When an approximate solution with an objective value below this number
   has been found, the solver will terminate.

   Not collective on Tao

   Input Parameters:
.  tao - the Tao solver context

   Output Parameters:
.  fmin - the minimum function value

   Level: intermediate

.seealso: TaoSetFunctionLowerBound()
@*/
PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  *fmin = tao->fmin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetMaximumFunctionEvaluations"
/*@
   TaoSetMaximumFunctionEvaluations - Sets a maximum number of
   function evaluations.

   Logically Collective on Tao

   Input Parameters:
+  tao - the Tao solver context
-  nfcn - the maximum number of function evaluations (>=0)

   Options Database Keys:
.
   -tao_max_funcs <nfcn> - sets the maximum number of function evaluations

   Level: intermediate

.seealso: TaoSetTolerances(), TaoSetMaximumIterations()
@*/

PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  /* negative values are clamped to zero rather than rejected */
  tao->max_funcs = PetscMax(0,nfcn);
  tao->max_funcs_changed=PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetMaximumFunctionEvaluations"
/*@
   TaoGetMaximumFunctionEvaluations - Gets the maximum number of
   function evaluations.

   Not Collective

   Input Parameters:
.  tao - the Tao solver context

   Output Parameters:
.  nfcn - the maximum number of function evaluations

   Level: intermediate

.seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
@*/

PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  *nfcn = tao->max_funcs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetCurrentFunctionEvaluations"
/*@
   TaoGetCurrentFunctionEvaluations - Gets the current number of
   function evaluations.

   Not Collective

   Input Parameters:
.  tao - the Tao solver context

   Output Parameters:
.  nfuncs - the current number of function evaluations

   Level: intermediate

.seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations()
@*/

PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  /* objective and combined objective/gradient evaluations are counted separately;
     report whichever counter is larger */
  *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetMaximumIterations"
/*@
   TaoSetMaximumIterations - Sets a maximum number of iterates.

   Logically Collective on Tao

   Input Parameters:
+  tao - the Tao solver context
-  maxits - the maximum number of iterates (>=0)

   Options Database Keys:
.  -tao_max_it <its> - sets the maximum number of iterations

   Level: intermediate

.seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations()
@*/
PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->max_it = PetscMax(0,maxits);
  tao->max_it_changed=PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetMaximumIterations"
/*@
   TaoGetMaximumIterations - Gets the maximum number of iterates.

   Not Collective

   Input Parameters:
.  tao - the Tao solver context

   Output Parameters:
.  maxits - the maximum number of iterates

   Level: intermediate

.seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations()
@*/
PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  *maxits = tao->max_it;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetInitialTrustRegionRadius"
/*@
   TaoSetInitialTrustRegionRadius - Sets the initial trust region radius.

   Logically collective on Tao

   Input Parameter:
+  tao - a TAO optimization solver
-  radius - the trust region radius

   Level: intermediate

   Options Database Key:
.  -tao_trust0 <t0> - sets initial trust region radius

.seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance()
@*/
PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->trust0 = PetscMax(0.0,radius);
  tao->trust0_changed=PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetInitialTrustRegionRadius"
/*@
   TaoGetInitialTrustRegionRadius - Gets the initial trust region radius.

   Not Collective

   Input Parameter:
.  tao - a TAO optimization solver

   Output Parameter:
.
radius - the trust region radius 1063 1064 Level: intermediate 1065 1066 .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius() 1067 @*/ 1068 PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius) 1069 { 1070 PetscFunctionBegin; 1071 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1072 *radius = tao->trust0; 1073 PetscFunctionReturn(0); 1074 } 1075 1076 #undef __FUNCT__ 1077 #define __FUNCT__ "TaoGetCurrentTrustRegionRadius" 1078 /*@ 1079 TaoGetCurrentTrustRegionRadius - Gets the current trust region radius. 1080 1081 Not Collective 1082 1083 Input Parameter: 1084 . tao - a TAO optimization solver 1085 1086 Output Parameter: 1087 . radius - the trust region radius 1088 1089 Level: intermediate 1090 1091 .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius() 1092 @*/ 1093 PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius) 1094 { 1095 PetscFunctionBegin; 1096 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1097 *radius = tao->trust; 1098 PetscFunctionReturn(0); 1099 } 1100 1101 #undef __FUNCT__ 1102 #define __FUNCT__ "TaoGetTolerances" 1103 /*@ 1104 TaoGetTolerances - gets the current values of tolerances 1105 1106 Not Collective 1107 1108 Input Parameters: 1109 . tao - the Tao context 1110 1111 Output Parameters: 1112 + gatol - stop if norm of gradient is less than this 1113 . 
grtol - stop if relative norm of gradient is less than this 1114 - gttol - stop if norm of gradient is reduced by a this factor 1115 1116 Note: NULL can be used as an argument if not all tolerances values are needed 1117 1118 .seealso TaoSetTolerances() 1119 1120 Level: intermediate 1121 @*/ 1122 PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol) 1123 { 1124 PetscFunctionBegin; 1125 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1126 if (gatol) *gatol=tao->gatol; 1127 if (grtol) *grtol=tao->grtol; 1128 if (gttol) *gttol=tao->gttol; 1129 PetscFunctionReturn(0); 1130 } 1131 1132 #undef __FUNCT__ 1133 #define __FUNCT__ "TaoGetKSP" 1134 /*@ 1135 TaoGetKSP - Gets the linear solver used by the optimization solver. 1136 Application writers should use TaoGetKSP if they need direct access 1137 to the PETSc KSP object. 1138 1139 Not Collective 1140 1141 Input Parameters: 1142 . tao - the TAO solver 1143 1144 Output Parameters: 1145 . ksp - the KSP linear solver used in the optimization solver 1146 1147 Level: intermediate 1148 1149 @*/ 1150 PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp) 1151 { 1152 PetscFunctionBegin; 1153 *ksp = tao->ksp; 1154 PetscFunctionReturn(0); 1155 } 1156 1157 #undef __FUNCT__ 1158 #define __FUNCT__ "TaoGetLinearSolveIterations" 1159 /*@ 1160 TaoGetLinearSolveIterations - Gets the total number of linear iterations 1161 used by the TAO solver 1162 1163 Not Collective 1164 1165 Input Parameter: 1166 . tao - TAO context 1167 1168 Output Parameter: 1169 . 
lits - number of linear iterations 1170 1171 Notes: 1172 This counter is reset to zero for each successive call to TaoSolve() 1173 1174 Level: intermediate 1175 1176 .keywords: TAO 1177 1178 .seealso: TaoGetKSP() 1179 @*/ 1180 PetscErrorCode TaoGetLinearSolveIterations(Tao tao,PetscInt *lits) 1181 { 1182 PetscFunctionBegin; 1183 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1184 PetscValidIntPointer(lits,2); 1185 *lits = tao->ksp_tot_its; 1186 PetscFunctionReturn(0); 1187 } 1188 1189 #undef __FUNCT__ 1190 #define __FUNCT__ "TaoGetLineSearch" 1191 /*@ 1192 TaoGetLineSearch - Gets the line search used by the optimization solver. 1193 Application writers should use TaoGetLineSearch if they need direct access 1194 to the TaoLineSearch object. 1195 1196 Not Collective 1197 1198 Input Parameters: 1199 . tao - the TAO solver 1200 1201 Output Parameters: 1202 . ls - the line search used in the optimization solver 1203 1204 Level: intermediate 1205 1206 @*/ 1207 PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls) 1208 { 1209 PetscFunctionBegin; 1210 *ls = tao->linesearch; 1211 PetscFunctionReturn(0); 1212 } 1213 1214 #undef __FUNCT__ 1215 #define __FUNCT__ "TaoAddLineSearchCounts" 1216 /*@ 1217 TaoAddLineSearchCounts - Adds the number of function evaluations spent 1218 in the line search to the running total. 

   Input Parameters:
.  tao - the TAO solver

   Level: developer

.seealso: TaoLineSearchApply()
@*/
PetscErrorCode TaoAddLineSearchCounts(Tao tao)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  PetscInt       nfeval,ngeval,nfgeval;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (tao->linesearch) {
    /* only add the counts when the line search does NOT reuse the Tao callbacks;
       otherwise the evaluations were already counted on the Tao object */
    ierr = TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);CHKERRQ(ierr);
    if (!flg) {
      ierr = TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);CHKERRQ(ierr);
      tao->nfuncs+=nfeval;
      tao->ngrads+=ngeval;
      tao->nfuncgrads+=nfgeval;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetSolutionVector"
/*@
   TaoGetSolutionVector - Returns the vector with the current TAO solution

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  X - the current solution

   Level: intermediate

   Note: The returned vector will be the same object that was passed into TaoSetInitialVector()
@*/
PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  *X = tao->solution;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetGradientVector"
/*@
   TaoGetGradientVector - Returns the vector with the current TAO gradient

   Not Collective

   Input Parameter:
.  tao - the Tao context

   Output Parameter:
.  G - the current gradient

   Level: intermediate
@*/
PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  *G = tao->gradient;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoResetStatistics"
/*@
   TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers.
   These statistics include the iteration number, residual norms, and convergence status.
   This routine gets called before solving each optimization problem.

   Collective on Tao

   Input Parameters:
.  tao - the Tao context

   Level: developer

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoResetStatistics(Tao tao)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  tao->niter        = 0;
  tao->nfuncs       = 0;
  tao->nfuncgrads   = 0;
  tao->ngrads       = 0;
  tao->nhess        = 0;
  tao->njac         = 0;
  tao->nconstraints = 0;
  tao->ksp_its      = 0;
  tao->ksp_tot_its  = 0;
  tao->reason       = TAO_CONTINUE_ITERATING;
  tao->residual     = 0.0;
  tao->cnorm        = 0.0;
  tao->step         = 0.0;
  tao->lsflag       = PETSC_FALSE;
  if (tao->hist_reset) tao->hist_len=0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetConvergenceTest"
/*@C
   TaoSetConvergenceTest - Sets the function that is to be used to test
   for convergence of the iterative minimization solution. The new convergence
   testing routine will replace TAO's default convergence test.

   Logically Collective on Tao

   Input Parameters:
+  tao - the Tao object
.
conv - the routine to test for convergence 1346 - ctx - [optional] context for private data for the convergence routine 1347 (may be NULL) 1348 1349 Calling sequence of conv: 1350 $ PetscErrorCode conv(Tao tao, void *ctx) 1351 1352 + tao - the Tao object 1353 - ctx - [optional] convergence context 1354 1355 Note: The new convergence testing routine should call TaoSetConvergedReason(). 1356 1357 Level: advanced 1358 1359 .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor 1360 1361 @*/ 1362 PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx) 1363 { 1364 PetscFunctionBegin; 1365 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1366 (tao)->ops->convergencetest = conv; 1367 (tao)->cnvP = ctx; 1368 PetscFunctionReturn(0); 1369 } 1370 1371 #undef __FUNCT__ 1372 #define __FUNCT__ "TaoSetMonitor" 1373 /*@C 1374 TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every 1375 iteration of the solver to display the iteration's 1376 progress. 1377 1378 Logically Collective on Tao 1379 1380 Input Parameters: 1381 + tao - the Tao solver context 1382 . mymonitor - monitoring routine 1383 - mctx - [optional] user-defined context for private data for the 1384 monitor routine (may be NULL) 1385 1386 Calling sequence of mymonitor: 1387 $ int mymonitor(Tao tao,void *mctx) 1388 1389 + tao - the Tao solver context 1390 - mctx - [optional] monitoring context 1391 1392 1393 Options Database Keys: 1394 + -tao_monitor - sets TaoDefaultMonitor() 1395 . -tao_smonitor - sets short monitor 1396 . -tao_cmonitor - same as smonitor plus constraint norm 1397 . -tao_view_solution - view solution at each iteration 1398 . -tao_view_gradient - view gradient at each iteration 1399 . 
-tao_view_separableobjective - view separable objective function at each iteration 1400 - -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database. 1401 1402 1403 Notes: 1404 Several different monitoring routines may be set by calling 1405 TaoSetMonitor() multiple times; all will be called in the 1406 order in which they were set. 1407 1408 Fortran Notes: Only one monitor function may be set 1409 1410 Level: intermediate 1411 1412 .seealso: TaoDefaultMonitor(), TaoCancelMonitors(), TaoSetDestroyRoutine() 1413 @*/ 1414 PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**)) 1415 { 1416 PetscErrorCode ierr; 1417 PetscInt i; 1418 1419 PetscFunctionBegin; 1420 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1421 if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PETSC_COMM_SELF,1,"Cannot attach another monitor -- max=",MAXTAOMONITORS); 1422 1423 for (i=0; i<tao->numbermonitors;i++) { 1424 if (func == tao->monitor[i] && dest == tao->monitordestroy[i] && ctx == tao->monitorcontext[i]) { 1425 if (dest) { 1426 ierr = (*dest)(&ctx);CHKERRQ(ierr); 1427 } 1428 PetscFunctionReturn(0); 1429 } 1430 } 1431 tao->monitor[tao->numbermonitors] = func; 1432 tao->monitorcontext[tao->numbermonitors] = ctx; 1433 tao->monitordestroy[tao->numbermonitors] = dest; 1434 ++tao->numbermonitors; 1435 PetscFunctionReturn(0); 1436 } 1437 1438 #undef __FUNCT__ 1439 #define __FUNCT__ "TaoCancelMonitors" 1440 /*@ 1441 TaoCancelMonitors - Clears all the monitor functions for a Tao object. 1442 1443 Logically Collective on Tao 1444 1445 Input Parameters: 1446 . tao - the Tao solver context 1447 1448 Options Database: 1449 . 
   -tao_cancelmonitors - cancels all monitors that have been hardwired
       into a code by calls to TaoSetMonitor(), but does not cancel those
       set via the options database

   Notes:
   There is no way to clear one specific monitor from a Tao object.

   Level: advanced

.seealso: TaoDefaultMonitor(), TaoSetMonitor()
@*/
PetscErrorCode TaoCancelMonitors(Tao tao)
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  /* give each monitor a chance to free its context before dropping it */
  for (i=0;i<tao->numbermonitors;i++) {
    if (tao->monitordestroy[i]) {
      ierr = (*tao->monitordestroy[i])(&tao->monitorcontext[i]);CHKERRQ(ierr);
    }
  }
  tao->numbermonitors=0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoDefaultMonitor"
/*@
   TaoDefaultMonitor - Default routine for monitoring progress of the
   Tao solvers (default). This monitor prints the function value and gradient
   norm at each iteration. It can be turned on from the command line using the
   -tao_monitor option

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  ctx - PetscViewer context or NULL

   Options Database Keys:
.  -tao_monitor

   Level: advanced

.seealso: TaoDefaultSMonitor(), TaoSetMonitor()
@*/
PetscErrorCode TaoDefaultMonitor(Tao tao, void *ctx)
{
  PetscErrorCode ierr;
  PetscInt       its;
  PetscReal      fct,gnorm;
  PetscViewer    viewer = (PetscViewer)ctx;

  PetscFunctionBegin;
  /* NOTE(review): ctx is documented as "PetscViewer context or NULL" but a NULL ctx
     would fail this validation -- confirm callers always supply a viewer */
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  its=tao->niter;
  fct=tao->fc;
  gnorm=tao->residual;
  ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);CHKERRQ(ierr);
  ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);CHKERRQ(ierr);
  if (gnorm >= PETSC_INFINITY) {
    ierr=PetscViewerASCIIPrintf(viewer,"  Residual: Inf \n");CHKERRQ(ierr);
  } else {
    ierr=PetscViewerASCIIPrintf(viewer,"  Residual: %g \n",(double)gnorm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoDefaultSMonitor"
/*@
   TaoDefaultSMonitor - Default routine for monitoring progress of the
   solver. Same as TaoDefaultMonitor() except
   it prints fewer digits of the residual as the residual gets smaller.
   This is because the later digits are meaningless and are often
   different on different machines; by using this routine different
   machines will usually generate the same output. It can be turned on
   by using the -tao_smonitor option

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  ctx - PetscViewer context of type ASCII

   Options Database Keys:
.  -tao_smonitor

   Level: advanced

.seealso: TaoDefaultMonitor(), TaoSetMonitor()
@*/
PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx)
{
  PetscErrorCode ierr;
  PetscInt       its;
  PetscReal      fct,gnorm;
  PetscViewer    viewer = (PetscViewer)ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  its=tao->niter;
  fct=tao->fc;
  gnorm=tao->residual;
  ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);CHKERRQ(ierr);
  ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);CHKERRQ(ierr);
  /* coarsen the printed residual so output matches across machines */
  if (gnorm >= PETSC_INFINITY) {
    ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");CHKERRQ(ierr);
  } else if (gnorm > 1.e-6) {
    ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);CHKERRQ(ierr);
  } else if (gnorm > 1.e-11) {
    ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");CHKERRQ(ierr);
  } else {
    ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoDefaultCMonitor"
/*@
   TaoDefaultCMonitor - same as TaoDefaultMonitor() except
   it prints the norm of the constraints function. It can be turned on
   from the command line using the -tao_cmonitor option

   Collective on Tao

   Input Parameters:
+  tao - the Tao context
-  ctx - PetscViewer context or NULL

   Options Database Keys:
.
-tao_cmonitor 1584 1585 Level: advanced 1586 1587 .seealso: TaoDefaultMonitor(), TaoSetMonitor() 1588 @*/ 1589 PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx) 1590 { 1591 PetscErrorCode ierr; 1592 PetscInt its; 1593 PetscReal fct,gnorm; 1594 PetscViewer viewer = (PetscViewer)ctx; 1595 1596 PetscFunctionBegin; 1597 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1598 its=tao->niter; 1599 fct=tao->fc; 1600 gnorm=tao->residual; 1601 ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);CHKERRQ(ierr); 1602 ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);CHKERRQ(ierr); 1603 ierr=PetscViewerASCIIPrintf(viewer," Residual: %g ",(double)gnorm);CHKERRQ(ierr); 1604 ierr = PetscViewerASCIIPrintf(viewer," Constraint: %g \n",(double)tao->cnorm);CHKERRQ(ierr); 1605 PetscFunctionReturn(0); 1606 } 1607 1608 #undef __FUNCT__ 1609 #define __FUNCT__ "TaoSolutionMonitor" 1610 /*@C 1611 TaoSolutionMonitor - Views the solution at each iteration 1612 It can be turned on from the command line using the 1613 -tao_view_solution option 1614 1615 Collective on Tao 1616 1617 Input Parameters: 1618 + tao - the Tao context 1619 - ctx - PetscViewer context or NULL 1620 1621 Options Database Keys: 1622 . 
-tao_view_solution 1623 1624 Level: advanced 1625 1626 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1627 @*/ 1628 PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx) 1629 { 1630 PetscErrorCode ierr; 1631 PetscViewer viewer = (PetscViewer)ctx;; 1632 1633 PetscFunctionBegin; 1634 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1635 ierr = VecView(tao->solution, viewer);CHKERRQ(ierr); 1636 PetscFunctionReturn(0); 1637 } 1638 1639 #undef __FUNCT__ 1640 #define __FUNCT__ "TaoGradientMonitor" 1641 /*@C 1642 TaoGradientMonitor - Views the gradient at each iteration 1643 It can be turned on from the command line using the 1644 -tao_view_gradient option 1645 1646 Collective on Tao 1647 1648 Input Parameters: 1649 + tao - the Tao context 1650 - ctx - PetscViewer context or NULL 1651 1652 Options Database Keys: 1653 . -tao_view_gradient 1654 1655 Level: advanced 1656 1657 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1658 @*/ 1659 PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx) 1660 { 1661 PetscErrorCode ierr; 1662 PetscViewer viewer = (PetscViewer)ctx; 1663 1664 PetscFunctionBegin; 1665 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1666 ierr = VecView(tao->gradient, viewer);CHKERRQ(ierr); 1667 PetscFunctionReturn(0); 1668 } 1669 1670 #undef __FUNCT__ 1671 #define __FUNCT__ "TaoStepDirectionMonitor" 1672 /*@C 1673 TaoStepDirectionMonitor - Views the gradient at each iteration 1674 It can be turned on from the command line using the 1675 -tao_view_gradient option 1676 1677 Collective on Tao 1678 1679 Input Parameters: 1680 + tao - the Tao context 1681 - ctx - PetscViewer context or NULL 1682 1683 Options Database Keys: 1684 . 
-tao_view_gradient 1685 1686 Level: advanced 1687 1688 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1689 @*/ 1690 PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx) 1691 { 1692 PetscErrorCode ierr; 1693 PetscViewer viewer = (PetscViewer)ctx; 1694 1695 PetscFunctionBegin; 1696 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1697 ierr = VecView(tao->stepdirection, viewer);CHKERRQ(ierr); 1698 PetscFunctionReturn(0); 1699 } 1700 1701 #undef __FUNCT__ 1702 #define __FUNCT__ "TaoDrawSolutionMonitor" 1703 /*@C 1704 TaoDrawSolutionMonitor - Plots the solution at each iteration 1705 It can be turned on from the command line using the 1706 -tao_draw_solution option 1707 1708 Collective on Tao 1709 1710 Input Parameters: 1711 + tao - the Tao context 1712 - ctx - PetscViewer context 1713 1714 Options Database Keys: 1715 . -tao_draw_solution 1716 1717 Level: advanced 1718 1719 .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor 1720 @*/ 1721 PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx) 1722 { 1723 PetscErrorCode ierr; 1724 PetscViewer viewer = (PetscViewer) ctx; 1725 1726 PetscFunctionBegin; 1727 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1728 ierr = VecView(tao->solution, viewer);CHKERRQ(ierr); 1729 PetscFunctionReturn(0); 1730 } 1731 1732 #undef __FUNCT__ 1733 #define __FUNCT__ "TaoDrawGradientMonitor" 1734 /*@C 1735 TaoDrawGradientMonitor - Plots the gradient at each iteration 1736 It can be turned on from the command line using the 1737 -tao_draw_gradient option 1738 1739 Collective on Tao 1740 1741 Input Parameters: 1742 + tao - the Tao context 1743 - ctx - PetscViewer context 1744 1745 Options Database Keys: 1746 . 
-tao_draw_gradient 1747 1748 Level: advanced 1749 1750 .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor 1751 @*/ 1752 PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx) 1753 { 1754 PetscErrorCode ierr; 1755 PetscViewer viewer = (PetscViewer)ctx; 1756 1757 PetscFunctionBegin; 1758 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1759 ierr = VecView(tao->gradient, viewer);CHKERRQ(ierr); 1760 PetscFunctionReturn(0); 1761 } 1762 1763 #undef __FUNCT__ 1764 #define __FUNCT__ "TaoDrawStepMonitor" 1765 /*@C 1766 TaoDrawStepMonitor - Plots the step direction at each iteration 1767 It can be turned on from the command line using the 1768 -tao_draw_step option 1769 1770 Collective on Tao 1771 1772 Input Parameters: 1773 + tao - the Tao context 1774 - ctx - PetscViewer context 1775 1776 Options Database Keys: 1777 . -tao_draw_step 1778 1779 Level: advanced 1780 1781 .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor 1782 @*/ 1783 PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx) 1784 { 1785 PetscErrorCode ierr; 1786 PetscViewer viewer = (PetscViewer)(ctx); 1787 1788 PetscFunctionBegin; 1789 ierr = VecView(tao->stepdirection, viewer);CHKERRQ(ierr); 1790 PetscFunctionReturn(0); 1791 } 1792 1793 #undef __FUNCT__ 1794 #define __FUNCT__ "TaoSeparableObjectiveMonitor" 1795 /*@C 1796 TaoSeparableObjectiveMonitor - Views the separable objective function at each iteration 1797 It can be turned on from the command line using the 1798 -tao_view_separableobjective option 1799 1800 Collective on Tao 1801 1802 Input Parameters: 1803 + tao - the Tao context 1804 - ctx - PetscViewer context or NULL 1805 1806 Options Database Keys: 1807 . 
-tao_view_separableobjective 1808 1809 Level: advanced 1810 1811 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1812 @*/ 1813 PetscErrorCode TaoSeparableObjectiveMonitor(Tao tao, void *ctx) 1814 { 1815 PetscErrorCode ierr; 1816 PetscViewer viewer = (PetscViewer)ctx; 1817 1818 PetscFunctionBegin; 1819 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1820 ierr = VecView(tao->sep_objective,viewer);CHKERRQ(ierr); 1821 PetscFunctionReturn(0); 1822 } 1823 1824 #undef __FUNCT__ 1825 #define __FUNCT__ "TaoDefaultConvergenceTest" 1826 /*@ 1827 TaoDefaultConvergenceTest - Determines whether the solver should continue iterating 1828 or terminate. 1829 1830 Collective on Tao 1831 1832 Input Parameters: 1833 + tao - the Tao context 1834 - dummy - unused dummy context 1835 1836 Output Parameter: 1837 . reason - for terminating 1838 1839 Notes: 1840 This routine checks the residual in the optimality conditions, the 1841 relative residual in the optimity conditions, the number of function 1842 evaluations, and the function value to test convergence. Some 1843 solvers may use different convergence routines. 
1844 1845 Level: developer 1846 1847 .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason() 1848 @*/ 1849 1850 PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy) 1851 { 1852 PetscInt niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads); 1853 PetscInt max_funcs=tao->max_funcs; 1854 PetscReal gnorm=tao->residual, gnorm0=tao->gnorm0; 1855 PetscReal f=tao->fc, steptol=tao->steptol,trradius=tao->step; 1856 PetscReal gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol; 1857 PetscReal catol=tao->catol,crtol=tao->crtol; 1858 PetscReal fmin=tao->fmin, cnorm=tao->cnorm; 1859 TaoConvergedReason reason=tao->reason; 1860 PetscErrorCode ierr; 1861 1862 PetscFunctionBegin; 1863 PetscValidHeaderSpecific(tao, TAO_CLASSID,1); 1864 if (reason != TAO_CONTINUE_ITERATING) { 1865 PetscFunctionReturn(0); 1866 } 1867 1868 if (PetscIsInfOrNanReal(f)) { 1869 ierr = PetscInfo(tao,"Failed to converged, function value is Inf or NaN\n");CHKERRQ(ierr); 1870 reason = TAO_DIVERGED_NAN; 1871 } else if (f <= fmin && cnorm <=catol) { 1872 ierr = PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);CHKERRQ(ierr); 1873 reason = TAO_CONVERGED_MINF; 1874 } else if (gnorm<= gatol && cnorm <=catol) { 1875 ierr = PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);CHKERRQ(ierr); 1876 reason = TAO_CONVERGED_GATOL; 1877 } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) { 1878 ierr = PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);CHKERRQ(ierr); 1879 reason = TAO_CONVERGED_GRTOL; 1880 } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) { 1881 ierr = PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);CHKERRQ(ierr); 1882 reason = TAO_CONVERGED_GTTOL; 1883 } else if (nfuncs > 
max_funcs){ 1884 ierr = PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);CHKERRQ(ierr); 1885 reason = TAO_DIVERGED_MAXFCN; 1886 } else if ( tao->lsflag != 0 ){ 1887 ierr = PetscInfo(tao,"Tao Line Search failure.\n");CHKERRQ(ierr); 1888 reason = TAO_DIVERGED_LS_FAILURE; 1889 } else if (trradius < steptol && niter > 0){ 1890 ierr = PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);CHKERRQ(ierr); 1891 reason = TAO_CONVERGED_STEPTOL; 1892 } else if (niter > tao->max_it) { 1893 ierr = PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);CHKERRQ(ierr); 1894 reason = TAO_DIVERGED_MAXITS; 1895 } else { 1896 reason = TAO_CONTINUE_ITERATING; 1897 } 1898 tao->reason = reason; 1899 PetscFunctionReturn(0); 1900 } 1901 1902 #undef __FUNCT__ 1903 #define __FUNCT__ "TaoSetOptionsPrefix" 1904 /*@C 1905 TaoSetOptionsPrefix - Sets the prefix used for searching for all 1906 TAO options in the database. 1907 1908 1909 Logically Collective on Tao 1910 1911 Input Parameters: 1912 + tao - the Tao context 1913 - prefix - the prefix string to prepend to all TAO option requests 1914 1915 Notes: 1916 A hyphen (-) must NOT be given at the beginning of the prefix name. 1917 The first character of all runtime options is AUTOMATICALLY the hyphen. 
1918 1919 For example, to distinguish between the runtime options for two 1920 different TAO solvers, one could call 1921 .vb 1922 TaoSetOptionsPrefix(tao1,"sys1_") 1923 TaoSetOptionsPrefix(tao2,"sys2_") 1924 .ve 1925 1926 This would enable use of different options for each system, such as 1927 .vb 1928 -sys1_tao_method blmvm -sys1_tao_gtol 1.e-3 1929 -sys2_tao_method lmvm -sys2_tao_gtol 1.e-4 1930 .ve 1931 1932 1933 Level: advanced 1934 1935 .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix() 1936 @*/ 1937 1938 PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[]) 1939 { 1940 PetscErrorCode ierr; 1941 1942 PetscFunctionBegin; 1943 ierr = PetscObjectSetOptionsPrefix((PetscObject)tao,p);CHKERRQ(ierr); 1944 if (tao->linesearch) { 1945 ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,p);CHKERRQ(ierr); 1946 } 1947 if (tao->ksp) { 1948 ierr = KSPSetOptionsPrefix(tao->ksp,p);CHKERRQ(ierr); 1949 } 1950 PetscFunctionReturn(0); 1951 } 1952 1953 #undef __FUNCT__ 1954 #define __FUNCT__ "TaoAppendOptionsPrefix" 1955 /*@C 1956 TaoAppendOptionsPrefix - Appends to the prefix used for searching for all 1957 TAO options in the database. 1958 1959 1960 Logically Collective on Tao 1961 1962 Input Parameters: 1963 + tao - the Tao solver context 1964 - prefix - the prefix string to prepend to all TAO option requests 1965 1966 Notes: 1967 A hyphen (-) must NOT be given at the beginning of the prefix name. 1968 The first character of all runtime options is AUTOMATICALLY the hyphen. 
1969 1970 1971 Level: advanced 1972 1973 .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix() 1974 @*/ 1975 PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[]) 1976 { 1977 PetscErrorCode ierr; 1978 1979 PetscFunctionBegin; 1980 ierr = PetscObjectAppendOptionsPrefix((PetscObject)tao,p);CHKERRQ(ierr); 1981 if (tao->linesearch) { 1982 ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,p);CHKERRQ(ierr); 1983 } 1984 if (tao->ksp) { 1985 ierr = KSPSetOptionsPrefix(tao->ksp,p);CHKERRQ(ierr); 1986 } 1987 PetscFunctionReturn(0); 1988 } 1989 1990 #undef __FUNCT__ 1991 #define __FUNCT__ "TaoGetOptionsPrefix" 1992 /*@C 1993 TaoGetOptionsPrefix - Gets the prefix used for searching for all 1994 TAO options in the database 1995 1996 Not Collective 1997 1998 Input Parameters: 1999 . tao - the Tao context 2000 2001 Output Parameters: 2002 . prefix - pointer to the prefix string used is returned 2003 2004 Notes: On the fortran side, the user should pass in a string 'prefix' of 2005 sufficient length to hold the prefix. 2006 2007 Level: advanced 2008 2009 .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix() 2010 @*/ 2011 PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[]) 2012 { 2013 return PetscObjectGetOptionsPrefix((PetscObject)tao,p); 2014 } 2015 2016 #undef __FUNCT__ 2017 #define __FUNCT__ "TaoSetType" 2018 /*@C 2019 TaoSetType - Sets the method for the unconstrained minimization solver. 2020 2021 Collective on Tao 2022 2023 Input Parameters: 2024 + solver - the Tao solver context 2025 - type - a known method 2026 2027 Options Database Key: 2028 . -tao_type <type> - Sets the method; use -help for a list 2029 of available methods (for instance, "-tao_type lmvm" or "-tao_type tron") 2030 2031 Available methods include: 2032 + nls - Newton's method with line search for unconstrained minimization 2033 . ntr - Newton's method with trust region for unconstrained minimization 2034 . 
ntl - Newton's method with trust region, line search for unconstrained minimization 2035 . lmvm - Limited memory variable metric method for unconstrained minimization 2036 . cg - Nonlinear conjugate gradient method for unconstrained minimization 2037 . nm - Nelder-Mead algorithm for derivate-free unconstrained minimization 2038 . tron - Newton Trust Region method for bound constrained minimization 2039 . gpcg - Newton Trust Region method for quadratic bound constrained minimization 2040 . blmvm - Limited memory variable metric method for bound constrained minimization 2041 - pounders - Model-based algorithm pounder extended for nonlinear least squares 2042 2043 Level: intermediate 2044 2045 .seealso: TaoCreate(), TaoGetType(), TaoType 2046 2047 @*/ 2048 PetscErrorCode TaoSetType(Tao tao, const TaoType type) 2049 { 2050 PetscErrorCode ierr; 2051 PetscErrorCode (*create_xxx)(Tao); 2052 PetscBool issame; 2053 2054 PetscFunctionBegin; 2055 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2056 2057 ierr = PetscObjectTypeCompare((PetscObject)tao,type,&issame);CHKERRQ(ierr); 2058 if (issame) PetscFunctionReturn(0); 2059 2060 ierr = PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);CHKERRQ(ierr); 2061 if (!create_xxx) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type); 2062 2063 /* Destroy the existing solver information */ 2064 if (tao->ops->destroy) { 2065 ierr = (*tao->ops->destroy)(tao);CHKERRQ(ierr); 2066 } 2067 ierr = KSPDestroy(&tao->ksp);CHKERRQ(ierr); 2068 ierr = TaoLineSearchDestroy(&tao->linesearch);CHKERRQ(ierr); 2069 ierr = VecDestroy(&tao->gradient);CHKERRQ(ierr); 2070 ierr = VecDestroy(&tao->stepdirection);CHKERRQ(ierr); 2071 2072 tao->ops->setup = 0; 2073 tao->ops->solve = 0; 2074 tao->ops->view = 0; 2075 tao->ops->setfromoptions = 0; 2076 tao->ops->destroy = 0; 2077 2078 tao->setupcalled = PETSC_FALSE; 2079 2080 ierr = (*create_xxx)(tao);CHKERRQ(ierr); 2081 ierr = 
PetscObjectChangeTypeName((PetscObject)tao,type);CHKERRQ(ierr); 2082 PetscFunctionReturn(0); 2083 } 2084 2085 #undef __FUNCT__ 2086 #define __FUNCT__ "TaoRegister" 2087 /*MC 2088 TaoRegister - Adds a method to the TAO package for unconstrained minimization. 2089 2090 Synopsis: 2091 TaoRegister(char *name_solver,char *path,char *name_Create,int (*routine_Create)(Tao)) 2092 2093 Not collective 2094 2095 Input Parameters: 2096 + sname - name of a new user-defined solver 2097 - func - routine to Create method context 2098 2099 Notes: 2100 TaoRegister() may be called multiple times to add several user-defined solvers. 2101 2102 Sample usage: 2103 .vb 2104 TaoRegister("my_solver",MySolverCreate); 2105 .ve 2106 2107 Then, your solver can be chosen with the procedural interface via 2108 $ TaoSetType(tao,"my_solver") 2109 or at runtime via the option 2110 $ -tao_type my_solver 2111 2112 Level: advanced 2113 2114 .seealso: TaoRegisterAll(), TaoRegisterDestroy() 2115 M*/ 2116 PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao)) 2117 { 2118 PetscErrorCode ierr; 2119 2120 PetscFunctionBegin; 2121 ierr = PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);CHKERRQ(ierr); 2122 PetscFunctionReturn(0); 2123 } 2124 2125 #undef __FUNCT__ 2126 #define __FUNCT__ "TaoRegisterDestroy" 2127 /*@C 2128 TaoRegisterDestroy - Frees the list of minimization solvers that were 2129 registered by TaoRegisterDynamic(). 2130 2131 Not Collective 2132 2133 Level: advanced 2134 2135 .seealso: TaoRegisterAll(), TaoRegister() 2136 @*/ 2137 PetscErrorCode TaoRegisterDestroy(void) 2138 { 2139 PetscErrorCode ierr; 2140 PetscFunctionBegin; 2141 ierr = PetscFunctionListDestroy(&TaoList);CHKERRQ(ierr); 2142 TaoRegisterAllCalled = PETSC_FALSE; 2143 PetscFunctionReturn(0); 2144 } 2145 2146 #undef __FUNCT__ 2147 #define __FUNCT__ "TaoGetIterationNumber" 2148 /*@ 2149 TaoGetIterationNumber - Gets the number of Tao iterations completed 2150 at this time. 
2151 2152 Not Collective 2153 2154 Input Parameter: 2155 . tao - Tao context 2156 2157 Output Parameter: 2158 . iter - iteration number 2159 2160 Notes: 2161 For example, during the computation of iteration 2 this would return 1. 2162 2163 2164 Level: intermediate 2165 2166 .keywords: Tao, nonlinear, get, iteration, number, 2167 2168 .seealso: TaoGetLinearSolveIterations() 2169 @*/ 2170 PetscErrorCode TaoGetIterationNumber(Tao tao,PetscInt *iter) 2171 { 2172 PetscFunctionBegin; 2173 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2174 PetscValidIntPointer(iter,2); 2175 *iter = tao->niter; 2176 PetscFunctionReturn(0); 2177 } 2178 2179 #undef __FUNCT__ 2180 #define __FUNCT__ "TaoSetIterationNumber" 2181 /*@ 2182 TaoSetIterationNumber - Sets the current iteration number. 2183 2184 Not Collective 2185 2186 Input Parameter: 2187 . tao - Tao context 2188 . iter - iteration number 2189 2190 Level: developer 2191 2192 .keywords: Tao, nonlinear, set, iteration, number, 2193 2194 .seealso: TaoGetLinearSolveIterations() 2195 @*/ 2196 PetscErrorCode TaoSetIterationNumber(Tao tao,PetscInt iter) 2197 { 2198 PetscErrorCode ierr; 2199 2200 PetscFunctionBegin; 2201 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2202 ierr = PetscObjectSAWsTakeAccess((PetscObject)tao);CHKERRQ(ierr); 2203 tao->niter = iter; 2204 ierr = PetscObjectSAWsGrantAccess((PetscObject)tao);CHKERRQ(ierr); 2205 PetscFunctionReturn(0); 2206 } 2207 2208 #undef __FUNCT__ 2209 #define __FUNCT__ "TaoGetTotalIterationNumber" 2210 /*@ 2211 TaoGetTotalIterationNumber - Gets the total number of Tao iterations 2212 completed. This number keeps accumulating if multiple solves 2213 are called with the Tao object. 2214 2215 Not Collective 2216 2217 Input Parameter: 2218 . tao - Tao context 2219 2220 Output Parameter: 2221 . iter - iteration number 2222 2223 Notes: 2224 The total iteration count is updated after each solve, if there is a current 2225 TaoSolve() in progress then those iterations are not yet counted. 
2226 2227 Level: intermediate 2228 2229 .keywords: Tao, nonlinear, get, iteration, number, 2230 2231 .seealso: TaoGetLinearSolveIterations() 2232 @*/ 2233 PetscErrorCode TaoGetTotalIterationNumber(Tao tao,PetscInt *iter) 2234 { 2235 PetscFunctionBegin; 2236 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2237 PetscValidIntPointer(iter,2); 2238 *iter = tao->ntotalits; 2239 PetscFunctionReturn(0); 2240 } 2241 2242 #undef __FUNCT__ 2243 #define __FUNCT__ "TaoSetTotalIterationNumber" 2244 /*@ 2245 TaoSetTotalIterationNumber - Sets the current total iteration number. 2246 2247 Not Collective 2248 2249 Input Parameter: 2250 . tao - Tao context 2251 . iter - iteration number 2252 2253 Level: developer 2254 2255 .keywords: Tao, nonlinear, set, iteration, number, 2256 2257 .seealso: TaoGetLinearSolveIterations() 2258 @*/ 2259 PetscErrorCode TaoSetTotalIterationNumber(Tao tao,PetscInt iter) 2260 { 2261 PetscErrorCode ierr; 2262 2263 PetscFunctionBegin; 2264 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2265 ierr = PetscObjectSAWsTakeAccess((PetscObject)tao);CHKERRQ(ierr); 2266 tao->ntotalits = iter; 2267 ierr = PetscObjectSAWsGrantAccess((PetscObject)tao);CHKERRQ(ierr); 2268 PetscFunctionReturn(0); 2269 } 2270 2271 #undef __FUNCT__ 2272 #define __FUNCT__ "TaoSetConvergedReason" 2273 /*@ 2274 TaoSetConvergedReason - Sets the termination flag on a Tao object 2275 2276 Logically Collective on Tao 2277 2278 Input Parameters: 2279 + tao - the Tao context 2280 - reason - one of 2281 $ TAO_CONVERGED_ATOL (2), 2282 $ TAO_CONVERGED_RTOL (3), 2283 $ TAO_CONVERGED_STEPTOL (4), 2284 $ TAO_CONVERGED_MINF (5), 2285 $ TAO_CONVERGED_USER (6), 2286 $ TAO_DIVERGED_MAXITS (-2), 2287 $ TAO_DIVERGED_NAN (-4), 2288 $ TAO_DIVERGED_MAXFCN (-5), 2289 $ TAO_DIVERGED_LS_FAILURE (-6), 2290 $ TAO_DIVERGED_TR_REDUCTION (-7), 2291 $ TAO_DIVERGED_USER (-8), 2292 $ TAO_CONTINUE_ITERATING (0) 2293 2294 Level: intermediate 2295 2296 @*/ 2297 PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason) 
2298 { 2299 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2300 PetscFunctionBegin; 2301 tao->reason = reason; 2302 PetscFunctionReturn(0); 2303 } 2304 2305 #undef __FUNCT__ 2306 #define __FUNCT__ "TaoGetConvergedReason" 2307 /*@ 2308 TaoGetConvergedReason - Gets the reason the Tao iteration was stopped. 2309 2310 Not Collective 2311 2312 Input Parameter: 2313 . tao - the Tao solver context 2314 2315 Output Parameter: 2316 . reason - one of 2317 $ TAO_CONVERGED_GATOL (3) ||g(X)|| < gatol 2318 $ TAO_CONVERGED_GRTOL (4) ||g(X)|| / f(X) < grtol 2319 $ TAO_CONVERGED_GTTOL (5) ||g(X)|| / ||g(X0)|| < gttol 2320 $ TAO_CONVERGED_STEPTOL (6) step size small 2321 $ TAO_CONVERGED_MINF (7) F < F_min 2322 $ TAO_CONVERGED_USER (8) User defined 2323 $ TAO_DIVERGED_MAXITS (-2) its > maxits 2324 $ TAO_DIVERGED_NAN (-4) Numerical problems 2325 $ TAO_DIVERGED_MAXFCN (-5) fevals > max_funcsals 2326 $ TAO_DIVERGED_LS_FAILURE (-6) line search failure 2327 $ TAO_DIVERGED_TR_REDUCTION (-7) trust region failure 2328 $ TAO_DIVERGED_USER(-8) (user defined) 2329 $ TAO_CONTINUE_ITERATING (0) 2330 2331 where 2332 + X - current solution 2333 . X0 - initial guess 2334 . f(X) - current function value 2335 . f(X*) - true solution (estimated) 2336 . g(X) - current gradient 2337 . its - current iterate number 2338 . maxits - maximum number of iterates 2339 . 
fevals - number of function evaluations 2340 - max_funcsals - maximum number of function evaluations 2341 2342 Level: intermediate 2343 2344 .seealso: TaoSetConvergenceTest(), TaoSetTolerances() 2345 2346 @*/ 2347 PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason) 2348 { 2349 PetscFunctionBegin; 2350 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2351 PetscValidPointer(reason,2); 2352 *reason = tao->reason; 2353 PetscFunctionReturn(0); 2354 } 2355 2356 #undef __FUNCT__ 2357 #define __FUNCT__ "TaoGetSolutionStatus" 2358 /*@ 2359 TaoGetSolutionStatus - Get the current iterate, objective value, 2360 residual, infeasibility, and termination 2361 2362 Not Collective 2363 2364 Input Parameters: 2365 . tao - the Tao context 2366 2367 Output Parameters: 2368 + iterate - the current iterate number (>=0) 2369 . f - the current function value 2370 . gnorm - the square of the gradient norm, duality gap, or other measure indicating distance from optimality. 2371 . cnorm - the infeasibility of the current solution with regard to the constraints. 2372 . xdiff - the step length or trust region radius of the most recent iterate. 2373 - reason - The termination reason, which can equal TAO_CONTINUE_ITERATING 2374 2375 Level: intermediate 2376 2377 Note: 2378 TAO returns the values set by the solvers in the routine TaoMonitor(). 2379 2380 Note: 2381 If any of the output arguments are set to NULL, no corresponding value will be returned. 
2382 2383 .seealso: TaoMonitor(), TaoGetConvergedReason() 2384 @*/ 2385 PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason) 2386 { 2387 PetscFunctionBegin; 2388 if (its) *its=tao->niter; 2389 if (f) *f=tao->fc; 2390 if (gnorm) *gnorm=tao->residual; 2391 if (cnorm) *cnorm=tao->cnorm; 2392 if (reason) *reason=tao->reason; 2393 if (xdiff) *xdiff=tao->step; 2394 PetscFunctionReturn(0); 2395 } 2396 2397 #undef __FUNCT__ 2398 #define __FUNCT__ "TaoGetType" 2399 /*@C 2400 TaoGetType - Gets the current Tao algorithm. 2401 2402 Not Collective 2403 2404 Input Parameter: 2405 . tao - the Tao solver context 2406 2407 Output Parameter: 2408 . type - Tao method 2409 2410 Level: intermediate 2411 2412 @*/ 2413 PetscErrorCode TaoGetType(Tao tao, const TaoType *type) 2414 { 2415 PetscFunctionBegin; 2416 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2417 PetscValidPointer(type,2); 2418 *type=((PetscObject)tao)->type_name; 2419 PetscFunctionReturn(0); 2420 } 2421 2422 #undef __FUNCT__ 2423 #define __FUNCT__ "TaoMonitor" 2424 /*@C 2425 TaoMonitor - Monitor the solver and the current solution. This 2426 routine will record the iteration number and residual statistics, 2427 call any monitors specified by the user, and calls the convergence-check routine. 2428 2429 Input Parameters: 2430 + tao - the Tao context 2431 . its - the current iterate number (>=0) 2432 . f - the current objective function value 2433 . res - the gradient norm, square root of the duality gap, or other measure indicating distince from optimality. This measure will be recorded and 2434 used for some termination tests. 2435 . cnorm - the infeasibility of the current solution with regard to the constraints. 2436 - steplength - multiple of the step direction added to the previous iterate. 2437 2438 Output Parameters: 2439 . 
reason - The termination reason, which can equal TAO_CONTINUE_ITERATING 2440 2441 Options Database Key: 2442 . -tao_monitor - Use the default monitor, which prints statistics to standard output 2443 2444 .seealso TaoGetConvergedReason(), TaoDefaultMonitor(), TaoSetMonitor() 2445 2446 Level: developer 2447 2448 @*/ 2449 PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength, TaoConvergedReason *reason) 2450 { 2451 PetscErrorCode ierr; 2452 PetscInt i; 2453 2454 PetscFunctionBegin; 2455 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2456 tao->fc = f; 2457 tao->residual = res; 2458 tao->cnorm = cnorm; 2459 tao->step = steplength; 2460 if (!its) { 2461 tao->cnorm0 = cnorm; tao->gnorm0 = res; 2462 } 2463 TaoLogConvergenceHistory(tao,f,res,cnorm,tao->ksp_its); 2464 if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN"); 2465 if (tao->ops->convergencetest) { 2466 ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr); 2467 } 2468 for (i=0;i<tao->numbermonitors;i++) { 2469 ierr = (*tao->monitor[i])(tao,tao->monitorcontext[i]);CHKERRQ(ierr); 2470 } 2471 *reason = tao->reason; 2472 PetscFunctionReturn(0); 2473 } 2474 2475 #undef __FUNCT__ 2476 #define __FUNCT__ "TaoSetConvergenceHistory" 2477 /*@ 2478 TaoSetConvergenceHistory - Sets the array used to hold the convergence history. 2479 2480 Logically Collective on Tao 2481 2482 Input Parameters: 2483 + tao - the Tao solver context 2484 . obj - array to hold objective value history 2485 . resid - array to hold residual history 2486 . cnorm - array to hold constraint violation history 2487 . lits - integer array holds the number of linear iterations for each Tao iteration 2488 . 
na - size of obj, resid, and cnorm 2489 - reset - PetscTrue indicates each new minimization resets the history counter to zero, 2490 else it continues storing new values for new minimizations after the old ones 2491 2492 Notes: 2493 If set, TAO will fill the given arrays with the indicated 2494 information at each iteration. If 'obj','resid','cnorm','lits' are 2495 *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or 2496 PETSC_DEFAULT) is allocated for the history. 2497 If not all are NULL, then only the non-NULL information categories 2498 will be stored, the others will be ignored. 2499 2500 Any convergence information after iteration number 'na' will not be stored. 2501 2502 This routine is useful, e.g., when running a code for purposes 2503 of accurate performance monitoring, when no I/O should be done 2504 during the section of code that is being timed. 2505 2506 Level: intermediate 2507 2508 .seealso: TaoGetConvergenceHistory() 2509 2510 @*/ 2511 PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal *obj, PetscReal *resid, PetscReal *cnorm, PetscInt *lits, PetscInt na,PetscBool reset) 2512 { 2513 PetscErrorCode ierr; 2514 2515 PetscFunctionBegin; 2516 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2517 if (obj) PetscValidScalarPointer(obj,2); 2518 if (resid) PetscValidScalarPointer(resid,3); 2519 if (cnorm) PetscValidScalarPointer(cnorm,4); 2520 if (lits) PetscValidIntPointer(lits,5); 2521 2522 if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000; 2523 if (!obj && !resid && !cnorm && !lits) { 2524 ierr = PetscCalloc1(na,&obj);CHKERRQ(ierr); 2525 ierr = PetscCalloc1(na,&resid);CHKERRQ(ierr); 2526 ierr = PetscCalloc1(na,&cnorm);CHKERRQ(ierr); 2527 ierr = PetscCalloc1(na,&lits);CHKERRQ(ierr); 2528 tao->hist_malloc=PETSC_TRUE; 2529 } 2530 2531 tao->hist_obj = obj; 2532 tao->hist_resid = resid; 2533 tao->hist_cnorm = cnorm; 2534 tao->hist_lits = lits; 2535 tao->hist_max = na; 2536 tao->hist_reset = reset; 2537 tao->hist_len = 0; 2538 
PetscFunctionReturn(0); 2539 } 2540 2541 #undef __FUNCT__ 2542 #define __FUNCT__ "TaoGetConvergenceHistory" 2543 /*@C 2544 TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history. 2545 2546 Collective on Tao 2547 2548 Input Parameter: 2549 . tao - the Tao context 2550 2551 Output Parameters: 2552 + obj - array used to hold objective value history 2553 . resid - array used to hold residual history 2554 . cnorm - array used to hold constraint violation history 2555 . lits - integer array used to hold linear solver iteration count 2556 - nhist - size of obj, resid, cnorm, and lits (will be less than or equal to na given in TaoSetHistory) 2557 2558 Notes: 2559 This routine must be preceded by calls to TaoSetConvergenceHistory() 2560 and TaoSolve(), otherwise it returns useless information. 2561 2562 The calling sequence for this routine in Fortran is 2563 $ call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr) 2564 2565 This routine is useful, e.g., when running a code for purposes 2566 of accurate performance monitoring, when no I/O should be done 2567 during the section of code that is being timed. 2568 2569 Level: advanced 2570 2571 .seealso: TaoSetConvergenceHistory() 2572 2573 @*/ 2574 PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist) 2575 { 2576 PetscFunctionBegin; 2577 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2578 if (obj) *obj = tao->hist_obj; 2579 if (cnorm) *cnorm = tao->hist_cnorm; 2580 if (resid) *resid = tao->hist_resid; 2581 if (nhist) *nhist = tao->hist_len; 2582 PetscFunctionReturn(0); 2583 } 2584 2585 #undef __FUNCT__ 2586 #define __FUNCT__ "TaoSetApplicationContext" 2587 /*@ 2588 TaoSetApplicationContext - Sets the optional user-defined context for 2589 a solver. 
2590 2591 Logically Collective on Tao 2592 2593 Input Parameters: 2594 + tao - the Tao context 2595 - usrP - optional user context 2596 2597 Level: intermediate 2598 2599 .seealso: TaoGetApplicationContext(), TaoSetApplicationContext() 2600 @*/ 2601 PetscErrorCode TaoSetApplicationContext(Tao tao,void *usrP) 2602 { 2603 PetscFunctionBegin; 2604 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2605 tao->user = usrP; 2606 PetscFunctionReturn(0); 2607 } 2608 2609 #undef __FUNCT__ 2610 #define __FUNCT__ "TaoGetApplicationContext" 2611 /*@ 2612 TaoGetApplicationContext - Gets the user-defined context for a 2613 TAO solvers. 2614 2615 Not Collective 2616 2617 Input Parameter: 2618 . tao - Tao context 2619 2620 Output Parameter: 2621 . usrP - user context 2622 2623 Level: intermediate 2624 2625 .seealso: TaoSetApplicationContext() 2626 @*/ 2627 PetscErrorCode TaoGetApplicationContext(Tao tao,void *usrP) 2628 { 2629 PetscFunctionBegin; 2630 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2631 *(void**)usrP = tao->user; 2632 PetscFunctionReturn(0); 2633 } 2634 2635 #undef __FUNCT__ 2636 #define __FUNCT__ "TaoSetGradientNorm" 2637 /*@ 2638 TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient. 
2639 2640 Collective on tao 2641 2642 Input Parameters: 2643 + tao - the Tao context 2644 - M - gradient norm 2645 2646 Level: beginner 2647 2648 .seealso: TaoGetGradientNorm(), TaoGradientNorm() 2649 @*/ 2650 PetscErrorCode TaoSetGradientNorm(Tao tao, Mat M) 2651 { 2652 PetscErrorCode ierr; 2653 2654 PetscFunctionBegin; 2655 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2656 2657 if (tao->gradient_norm) { 2658 ierr = PetscObjectDereference((PetscObject)tao->gradient_norm);CHKERRQ(ierr); 2659 ierr = VecDestroy(&tao->gradient_norm_tmp);CHKERRQ(ierr); 2660 } 2661 2662 ierr = PetscObjectReference((PetscObject)M);CHKERRQ(ierr); 2663 tao->gradient_norm = M; 2664 ierr = MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);CHKERRQ(ierr); 2665 PetscFunctionReturn(0); 2666 } 2667 2668 #undef __FUNCT__ 2669 #define __FUNCT__ "TaoGetGradientNorm" 2670 /*@ 2671 TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient. 2672 2673 Not Collective 2674 2675 Input Parameter: 2676 . tao - Tao context 2677 2678 Output Parameter: 2679 . M - gradient norm 2680 2681 Level: beginner 2682 2683 .seealso: TaoSetGradientNorm(), TaoGradientNorm() 2684 @*/ 2685 PetscErrorCode TaoGetGradientNorm(Tao tao, Mat *M) 2686 { 2687 PetscFunctionBegin; 2688 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2689 *M = tao->gradient_norm; 2690 PetscFunctionReturn(0); 2691 } 2692 2693 #undef __FUNCT__ 2694 #define __FUNCT__ "TaoGradientNorm" 2695 /*c 2696 TaoGradientNorm - Compute the norm with respect to the inner product the user has set. 2697 2698 Collective on tao 2699 2700 Input Parameter: 2701 . tao - the Tao context 2702 . gradient - the gradient to be computed 2703 . norm - the norm type 2704 2705 Output Parameter: 2706 . 
gnorm - the gradient norm 2707 2708 Level: developer 2709 2710 .seealso: TaoSetGradientNorm(), TaoGetGradientNorm() 2711 @*/ 2712 PetscErrorCode TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm) 2713 { 2714 PetscErrorCode ierr; 2715 2716 PetscFunctionBegin; 2717 PetscValidHeaderSpecific(gradient,VEC_CLASSID,1); 2718 2719 if (tao->gradient_norm) { 2720 PetscScalar gnorms; 2721 2722 if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONGSTATE, "Norm type must be NORM_2 if an inner product for the gradient norm is set."); 2723 ierr = MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);CHKERRQ(ierr); 2724 ierr = VecDot(gradient, tao->gradient_norm_tmp, &gnorms);CHKERRQ(ierr); 2725 *gnorm = PetscRealPart(PetscSqrtScalar(gnorms)); 2726 } else { 2727 ierr = VecNorm(gradient, type, gnorm);CHKERRQ(ierr); 2728 } 2729 PetscFunctionReturn(0); 2730 } 2731 2732 2733