xref: /petsc/src/tao/bound/impls/bncg/bncg.c (revision 65f8aed5f7eaa1e2ef2ddeffe666264e0669c876)
1 #include <petsctaolinesearch.h>
2 #include <../src/tao/bound/impls/bncg/bncg.h>
3 
/* Identifiers for the supported CG update formulas; these index CG_Table below */
#define CG_FletcherReeves       0
#define CG_PolakRibiere         1
#define CG_PolakRibierePlus     2
#define CG_HestenesStiefel      3
#define CG_DaiYuan              4
#define CG_GradientDescent      5
#define CG_Types                6

/* Command-line option names for the CG formulas, indexed by the CG_* values above */
static const char *CG_Table[64] = {"fr", "pr", "prp", "hs", "dy", "gd"};

/* Identifiers for the active-set estimation strategies */
#define CG_AS_NONE       0
#define CG_AS_BERTSEKAS  1
#define CG_AS_SIZE       2

/* Command-line option names for the active-set strategies, indexed by CG_AS_* */
static const char *CG_AS_TYPE[64] = {"none", "bertsekas"};
19 
20 PetscErrorCode TaoBNCGSetRecycleFlag(Tao tao, PetscBool recycle)
21 {
22   TAO_BNCG                     *cg = (TAO_BNCG*)tao->data;
23 
24   PetscFunctionBegin;
25   cg->recycle = recycle;
26   PetscFunctionReturn(0);
27 }
28 
/*
  TaoBNCGEstimateActiveSet - Estimates the index sets of actively bounded
  variables at the current solution, saving the previous inactive set so
  that variables freed from their bounds this iteration can be identified
  later in the solve loop.

  Input Parameters:
+ tao    - the Tao solver context
- asType - active-set estimation method (CG_AS_NONE or CG_AS_BERTSEKAS)
*/
PetscErrorCode TaoBNCGEstimateActiveSet(Tao tao, PetscInt asType)
{
  PetscErrorCode               ierr;
  TAO_BNCG                     *cg = (TAO_BNCG *)tao->data;

  PetscFunctionBegin;
  /* No bounds means no active set to estimate */
  if (!tao->bounded) PetscFunctionReturn(0);
  /* Preserve an independent copy of the current inactive set before it is rebuilt */
  ierr = ISDestroy(&cg->inactive_old);CHKERRQ(ierr);
  if (cg->inactive_idx) {
    ierr = ISDuplicate(cg->inactive_idx, &cg->inactive_old);CHKERRQ(ierr);
    ierr = ISCopy(cg->inactive_idx, cg->inactive_old);CHKERRQ(ierr);
  }
  switch (asType) {
  case CG_AS_NONE:
    /* Classify variables directly from the bounds and unprojected gradient
       (see VecWhichInactive); the active set is the complement */
    ierr = ISDestroy(&cg->inactive_idx);CHKERRQ(ierr);
    ierr = VecWhichInactive(tao->XL, tao->solution, cg->unprojected_gradient, tao->XU, PETSC_TRUE, &cg->inactive_idx);CHKERRQ(ierr);
    ierr = ISDestroy(&cg->active_idx);CHKERRQ(ierr);
    ierr = ISComplementVec(cg->inactive_idx, tao->solution, &cg->active_idx);CHKERRQ(ierr);
    break;

  case CG_AS_BERTSEKAS:
    /* Use gradient descent to estimate the active set */
    /* W = -g serves as the trial descent direction for the estimation;
       note that as_tol is passed by reference and may be updated internally */
    ierr = VecCopy(cg->unprojected_gradient, cg->W);CHKERRQ(ierr);
    ierr = VecScale(cg->W, -1.0);CHKERRQ(ierr);
    ierr = TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, cg->unprojected_gradient, cg->W, cg->work, cg->as_step, &cg->as_tol,
                                   &cg->active_lower, &cg->active_upper, &cg->active_fixed, &cg->active_idx, &cg->inactive_idx);CHKERRQ(ierr);
    break;

  default:
    /* Unknown estimation type: silently leave the existing index sets unchanged */
    break;
  }
  PetscFunctionReturn(0);
}
62 
63 PetscErrorCode TaoBNCGBoundStep(Tao tao, PetscInt asType, Vec step)
64 {
65   PetscErrorCode               ierr;
66   TAO_BNCG                     *cg = (TAO_BNCG *)tao->data;
67 
68   PetscFunctionBegin;
69   if (!tao->bounded) PetscFunctionReturn(0);
70   switch (asType) {
71   case CG_AS_NONE:
72     ierr = VecISSet(step, cg->active_idx, 0.0);CHKERRQ(ierr);
73     break;
74 
75   case CG_AS_BERTSEKAS:
76     ierr = TaoBoundStep(tao->solution, tao->XL, tao->XU, cg->active_lower, cg->active_upper, cg->active_fixed, 1.0, step);CHKERRQ(ierr);
77     break;
78 
79   default:
80     break;
81   }
82   PetscFunctionReturn(0);
83 }
84 
/*
  TaoSolve_BNCG - Main solve loop for the bound-constrained nonlinear CG
  method: projects the iterate onto the feasible set, estimates the active
  set, computes a (possibly restarted) CG direction restricted to the
  inactive variables, and advances via a bounded line search, falling back
  to projected gradient descent when the CG direction fails.
*/
static PetscErrorCode TaoSolve_BNCG(Tao tao)
{
  TAO_BNCG                     *cg = (TAO_BNCG*)tao->data;
  PetscErrorCode               ierr;
  TaoLineSearchConvergedReason ls_status = TAOLINESEARCH_CONTINUE_ITERATING;
  PetscReal                    step=1.0,gnorm,gnorm2,gd,ginner,beta,dnorm;
  PetscReal                    gd_old,gnorm2_old,f_old,resnorm;
  PetscBool                    cg_restart, gd_fallback = PETSC_FALSE;
  PetscInt                     nDiff;

  PetscFunctionBegin;
  /*   Project the current point onto the feasible set */
  ierr = TaoComputeVariableBounds(tao);CHKERRQ(ierr);
  ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr);
  if (tao->bounded) {
    ierr = TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);CHKERRQ(ierr);
  }

  /* nDiff > 0 means the projection moved the point, invalidating recycled data */
  if (nDiff > 0 || !cg->recycle) {
    /*  Solver is not being recycled so just compute the objective function and criteria */
    ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &cg->f, cg->unprojected_gradient);CHKERRQ(ierr);
  } else {
    /* We are recycling, so we have to compute ||g_old||^2 for use in the CG step calculation */
    /* NOTE(review): on the non-recycled path gnorm2_old is not assigned here; it
       is only set at the end of each iteration.  If the first iteration does not
       trigger a restart, the beta formulas below could read it uninitialized --
       confirm the restart conditions always fire on iteration 1 in that case. */
    ierr = VecDot(cg->G_old, cg->G_old, &gnorm2_old);CHKERRQ(ierr);
  }
  ierr = VecNorm(cg->unprojected_gradient,NORM_2,&gnorm);CHKERRQ(ierr);
  if (PetscIsInfOrNanReal(cg->f) || PetscIsInfOrNanReal(gnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");

  /* Estimate the active set and compute the projected gradient */
  ierr = TaoBNCGEstimateActiveSet(tao, cg->as_type);CHKERRQ(ierr);

  /* Project the gradient and calculate the norm */
  ierr = VecCopy(cg->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
  ierr = VecISSet(tao->gradient, cg->active_idx, 0.0);CHKERRQ(ierr);
  ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
  gnorm2 = gnorm*gnorm;

  /* Convergence check: residual is the norm of the Fischer-Burmeister function,
     which measures violation of the bound-constrained optimality conditions */
  tao->niter = 0;
  tao->reason = TAO_CONTINUE_ITERATING;
  ierr = VecFischer(tao->solution, cg->unprojected_gradient, tao->XL, tao->XU, cg->W);CHKERRQ(ierr);
  ierr = VecNorm(cg->W, NORM_2, &resnorm);CHKERRQ(ierr);
  ierr = TaoLogConvergenceHistory(tao, cg->f, resnorm, 0.0, tao->ksp_its);CHKERRQ(ierr);
  ierr = TaoMonitor(tao, tao->niter, cg->f, resnorm, 0.0, step);CHKERRQ(ierr);
  ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
  if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);

  /* Start optimization iterations */
  cg->ls_fails = cg->broken_ortho = cg->descent_error = 0;
  /* resets starts at -1, presumably so the expected first-iteration restart is
     not counted -- TODO confirm against TaoView_BNCG output expectations */
  cg->resets = -1;
  while (tao->reason == TAO_CONTINUE_ITERATING) {
    ++tao->niter;

    /* Check restart conditions for using steepest descent */
    cg_restart = PETSC_FALSE;
    ierr = VecDot(tao->gradient, cg->G_old, &ginner);CHKERRQ(ierr);
    ierr = VecNorm(tao->stepdirection, NORM_2, &dnorm);CHKERRQ(ierr);
    if (tao->niter == 1 && !cg->recycle && dnorm != 0.0) {
      /* 1) First iteration, with recycle disabled, and a non-zero previous step */
      cg_restart = PETSC_TRUE;
    } else if (PetscAbsScalar(ginner) >= cg->eta * gnorm2) {
      /* 2) Gradients are far from orthogonal */
      cg_restart = PETSC_TRUE;
      ++cg->broken_ortho;
    }

    /* Compute CG step scaling beta; a restart forces beta = 0 (steepest descent) */
    if (cg_restart) {
      beta = 0.0;
      ++cg->resets;
    } else {
      switch (cg->cg_type) {
      case CG_FletcherReeves:
        beta = gnorm2 / gnorm2_old;
        break;

      case CG_PolakRibiere:
        beta = (gnorm2 - ginner) / gnorm2_old;
        break;

      case CG_PolakRibierePlus:
        /* PR+ clips negative beta to zero to guarantee a restart-like behavior */
        beta = PetscMax((gnorm2-ginner)/gnorm2_old, 0.0);
        break;

      case CG_HestenesStiefel:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cg->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = (gnorm2 - ginner) / (gd - gd_old);
        break;

      case CG_DaiYuan:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cg->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = gnorm2 / (gd - gd_old);
        break;

      case CG_GradientDescent:
        beta = 0.0;
        break;

      default:
        beta = 0.0;
        break;
      }
    }

    /*  Compute the direction d=-g + beta*d */
    ierr = VecAXPBY(tao->stepdirection, -1.0, beta, tao->gradient);CHKERRQ(ierr);
    ierr = TaoBNCGBoundStep(tao, cg->as_type, tao->stepdirection);CHKERRQ(ierr);

    if (cg->cg_type != CG_GradientDescent) {
      /* Figure out which previously active variables became inactive this iteration */
      ierr = ISDestroy(&cg->new_inactives);CHKERRQ(ierr);
      if (cg->inactive_idx && cg->inactive_old) {
        ierr = ISDifference(cg->inactive_idx, cg->inactive_old, &cg->new_inactives);CHKERRQ(ierr);
      }

      /* Selectively reset the CG step those freshly inactive variables */
      if (cg->new_inactives) {
        /* Replace the step entries of newly freed variables with -gradient,
           since their accumulated CG history is meaningless at the bound */
        ierr = VecGetSubVector(tao->stepdirection, cg->new_inactives, &cg->inactive_step);CHKERRQ(ierr);
        ierr = VecGetSubVector(cg->unprojected_gradient, cg->new_inactives, &cg->inactive_grad);CHKERRQ(ierr);
        ierr = VecCopy(cg->inactive_grad, cg->inactive_step);CHKERRQ(ierr);
        ierr = VecScale(cg->inactive_step, -1.0);CHKERRQ(ierr);
        ierr = VecRestoreSubVector(tao->stepdirection, cg->new_inactives, &cg->inactive_step);CHKERRQ(ierr);
        ierr = VecRestoreSubVector(cg->unprojected_gradient, cg->new_inactives, &cg->inactive_grad);CHKERRQ(ierr);
      }

      /* Verify that this is a descent direction */
      ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
      ierr = VecNorm(tao->stepdirection, NORM_2, &dnorm);CHKERRQ(ierr);
      if (gd > -cg->rho*PetscPowReal(dnorm, cg->pow)) {
        /* Not a descent direction, so we reset back to projected gradient descent */
        ierr = VecAXPBY(tao->stepdirection, -1.0, 0.0, tao->gradient);CHKERRQ(ierr);
        ierr = TaoBNCGBoundStep(tao, cg->as_type, tao->stepdirection);CHKERRQ(ierr);
        ++cg->resets;
        ++cg->descent_error;
        gd_fallback = PETSC_TRUE;
      } else {
        gd_fallback = PETSC_FALSE;
      }
    }

    /* Store solution and gradient info before it changes */
    ierr = VecCopy(tao->solution, cg->X_old);CHKERRQ(ierr);
    ierr = VecCopy(tao->gradient, cg->G_old);CHKERRQ(ierr);
    ierr = VecCopy(cg->unprojected_gradient, cg->unprojected_gradient_old);CHKERRQ(ierr);
    gnorm2_old = gnorm2;
    f_old = cg->f;

    /* Perform bounded line search */
    ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &cg->f, cg->unprojected_gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
    ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

    /*  Check linesearch failure */
    if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
      ++cg->ls_fails;
      /* Restore previous point */
      gnorm2 = gnorm2_old;
      cg->f = f_old;
      ierr = VecCopy(cg->X_old, tao->solution);CHKERRQ(ierr);
      ierr = VecCopy(cg->G_old, tao->gradient);CHKERRQ(ierr);
      ierr = VecCopy(cg->unprojected_gradient_old, cg->unprojected_gradient);CHKERRQ(ierr);

      if (cg->cg_type == CG_GradientDescent || gd_fallback){
        /* Nothing left to do but fail out of the optimization */
        step = 0.0;
        tao->reason = TAO_DIVERGED_LS_FAILURE;
      } else {
        /* Fall back on the unscaled gradient step */
        ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
        ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
        ierr = TaoBNCGBoundStep(tao, cg->as_type, tao->stepdirection);CHKERRQ(ierr);

        ierr = TaoLineSearchSetInitialStepLength(tao->linesearch, 1.0);CHKERRQ(ierr);
        ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &cg->f, cg->unprojected_gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
        ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

        if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER){
          cg->ls_fails++;
          /* Restore previous point */
          gnorm2 = gnorm2_old;
          cg->f = f_old;
          ierr = VecCopy(cg->X_old, tao->solution);CHKERRQ(ierr);
          ierr = VecCopy(cg->G_old, tao->gradient);CHKERRQ(ierr);
          ierr = VecCopy(cg->unprojected_gradient_old, cg->unprojected_gradient);CHKERRQ(ierr);

          /* Nothing left to do but fail out of the optimization */
          step = 0.0;
          tao->reason = TAO_DIVERGED_LS_FAILURE;
        }
      }
    }

    if (tao->reason != TAO_DIVERGED_LS_FAILURE) {
      /* Estimate the active set at the new solution */
      ierr = TaoBNCGEstimateActiveSet(tao, cg->as_type);CHKERRQ(ierr);

      /* Compute the projected gradient and its norm */
      ierr = VecCopy(cg->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
      ierr = VecISSet(tao->gradient, cg->active_idx, 0.0);CHKERRQ(ierr);
      ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
      gnorm2 = gnorm*gnorm;
    }

    /* Convergence test */
    ierr = VecFischer(tao->solution, cg->unprojected_gradient, tao->XL, tao->XU, cg->W);CHKERRQ(ierr);
    ierr = VecNorm(cg->W, NORM_2, &resnorm);CHKERRQ(ierr);
    if (PetscIsInfOrNanReal(resnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
    ierr = TaoLogConvergenceHistory(tao, cg->f, resnorm, 0.0, tao->ksp_its);CHKERRQ(ierr);
    ierr = TaoMonitor(tao, tao->niter, cg->f, resnorm, 0.0, step);CHKERRQ(ierr);
    ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
299 
300 static PetscErrorCode TaoSetUp_BNCG(Tao tao)
301 {
302   TAO_BNCG         *cg = (TAO_BNCG*)tao->data;
303   PetscErrorCode ierr;
304 
305   PetscFunctionBegin;
306   if (!tao->gradient) {
307     ierr = VecDuplicate(tao->solution,&tao->gradient);CHKERRQ(ierr);
308   }
309   if (!tao->stepdirection) {
310     ierr = VecDuplicate(tao->solution,&tao->stepdirection);CHKERRQ(ierr);
311   }
312   if (!cg->W) {
313     ierr = VecDuplicate(tao->solution,&cg->W);CHKERRQ(ierr);
314   }
315   if (!cg->work) {
316     ierr = VecDuplicate(tao->solution,&cg->work);CHKERRQ(ierr);
317   }
318   if (!cg->X_old) {
319     ierr = VecDuplicate(tao->solution,&cg->X_old);CHKERRQ(ierr);
320   }
321   if (!cg->G_old) {
322     ierr = VecDuplicate(tao->gradient,&cg->G_old);CHKERRQ(ierr);
323   }
324   if (!cg->unprojected_gradient) {
325     ierr = VecDuplicate(tao->gradient,&cg->unprojected_gradient);CHKERRQ(ierr);
326   }
327   if (!cg->unprojected_gradient_old) {
328     ierr = VecDuplicate(tao->gradient,&cg->unprojected_gradient_old);CHKERRQ(ierr);
329   }
330   PetscFunctionReturn(0);
331 }
332 
333 static PetscErrorCode TaoDestroy_BNCG(Tao tao)
334 {
335   TAO_BNCG       *cg = (TAO_BNCG*) tao->data;
336   PetscErrorCode ierr;
337 
338   PetscFunctionBegin;
339   if (tao->setupcalled) {
340     ierr = VecDestroy(&cg->W);CHKERRQ(ierr);
341     ierr = VecDestroy(&cg->work);CHKERRQ(ierr);
342     ierr = VecDestroy(&cg->X_old);CHKERRQ(ierr);
343     ierr = VecDestroy(&cg->G_old);CHKERRQ(ierr);
344     ierr = VecDestroy(&cg->unprojected_gradient);CHKERRQ(ierr);
345     ierr = VecDestroy(&cg->unprojected_gradient_old);CHKERRQ(ierr);
346   }
347   ierr = ISDestroy(&cg->active_lower);CHKERRQ(ierr);
348   ierr = ISDestroy(&cg->active_upper);CHKERRQ(ierr);
349   ierr = ISDestroy(&cg->active_fixed);CHKERRQ(ierr);
350   ierr = ISDestroy(&cg->active_idx);CHKERRQ(ierr);
351   ierr = ISDestroy(&cg->inactive_idx);CHKERRQ(ierr);
352   ierr = ISDestroy(&cg->inactive_old);CHKERRQ(ierr);
353   ierr = ISDestroy(&cg->new_inactives);CHKERRQ(ierr);
354   ierr = PetscFree(tao->data);CHKERRQ(ierr);
355   PetscFunctionReturn(0);
356 }
357 
358 static PetscErrorCode TaoSetFromOptions_BNCG(PetscOptionItems *PetscOptionsObject,Tao tao)
359  {
360     TAO_BNCG       *cg = (TAO_BNCG*)tao->data;
361     PetscErrorCode ierr;
362 
363     PetscFunctionBegin;
364     ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
365     ierr = PetscOptionsHead(PetscOptionsObject,"Nonlinear Conjugate Gradient method for unconstrained optimization");CHKERRQ(ierr);
366     ierr = PetscOptionsReal("-tao_bncg_eta","restart tolerance", "", cg->eta,&cg->eta,NULL);CHKERRQ(ierr);
367     ierr = PetscOptionsReal("-tao_bncg_rho","descent direction tolerance", "", cg->rho,&cg->rho,NULL);CHKERRQ(ierr);
368     ierr = PetscOptionsReal("-tao_bncg_pow","descent direction exponent", "", cg->pow,&cg->pow,NULL);CHKERRQ(ierr);
369     ierr = PetscOptionsEList("-tao_bncg_type","cg formula", "", CG_Table, CG_Types, CG_Table[cg->cg_type], &cg->cg_type,NULL);CHKERRQ(ierr);
370     ierr = PetscOptionsEList("-tao_bncg_as_type","active set estimation method", "", CG_AS_TYPE, CG_AS_SIZE, CG_AS_TYPE[cg->as_type], &cg->as_type,NULL);CHKERRQ(ierr);
371     ierr = PetscOptionsBool("-tao_bncg_recycle","enable recycling the existing solution and gradient at the start of a new solve","",cg->recycle,&cg->recycle,NULL);CHKERRQ(ierr);
372     ierr = PetscOptionsReal("-tao_bncg_as_tol", "initial tolerance used when estimating actively bounded variables","",cg->as_tol,&cg->as_tol,NULL);CHKERRQ(ierr);
373     ierr = PetscOptionsReal("-tao_bncg_as_step", "step length used when estimating actively bounded variables","",cg->as_step,&cg->as_step,NULL);CHKERRQ(ierr);
374    ierr = PetscOptionsTail();CHKERRQ(ierr);
375    PetscFunctionReturn(0);
376 }
377 
378 static PetscErrorCode TaoView_BNCG(Tao tao, PetscViewer viewer)
379 {
380   PetscBool      isascii;
381   TAO_BNCG       *cg = (TAO_BNCG*)tao->data;
382   PetscErrorCode ierr;
383 
384   PetscFunctionBegin;
385   ierr = PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
386   if (isascii) {
387     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
388     ierr = PetscViewerASCIIPrintf(viewer, "CG Type: %s\n", CG_Table[cg->cg_type]);CHKERRQ(ierr);
389     ierr = PetscViewerASCIIPrintf(viewer, "Resets: %i\n", cg->resets);CHKERRQ(ierr);
390     ierr = PetscViewerASCIIPrintf(viewer, "  Broken ortho: %i\n", cg->broken_ortho);CHKERRQ(ierr);
391     ierr = PetscViewerASCIIPrintf(viewer, "  Not a descent dir.: %i\n", cg->descent_error);CHKERRQ(ierr);
392     ierr = PetscViewerASCIIPrintf(viewer, "Line search fails: %i\n", cg->ls_fails);CHKERRQ(ierr);
393     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
394   }
395   PetscFunctionReturn(0);
396 }
397 
398 /*MC
399      TAOBNCG -   Bound-constrained Nonlinear Conjugate Gradient method.
400 
401    Options Database Keys:
402 +      -tao_bncg_recycle - enable recycling the latest calculated gradient vector in subsequent TaoSolve() calls
403 .      -tao_bncg_eta <r> - restart tolerance
404 .      -tao_bncg_type <taocg_type> - cg formula
405 .      -tao_bncg_as_type <none,bertsekas> - active set estimation method
406 .      -tao_bncg_as_tol <r> - tolerance used in Bertsekas active-set estimation
-      -tao_bncg_as_step <r> - trial step length used in Bertsekas active-set estimation
408 
409   Notes:
410      CG formulas are:
411          "fr" - Fletcher-Reeves
412          "pr" - Polak-Ribiere
413          "prp" - Polak-Ribiere-Plus
         "hs" - Hestenes-Stiefel
415          "dy" - Dai-Yuan
416          "gd" - Gradient Descent
417   Level: beginner
418 M*/
419 
420 
421 PETSC_EXTERN PetscErrorCode TaoCreate_BNCG(Tao tao)
422 {
423   TAO_BNCG       *cg;
424   const char     *morethuente_type = TAOLINESEARCHMT;
425   PetscErrorCode ierr;
426 
427   PetscFunctionBegin;
428   tao->ops->setup = TaoSetUp_BNCG;
429   tao->ops->solve = TaoSolve_BNCG;
430   tao->ops->view = TaoView_BNCG;
431   tao->ops->setfromoptions = TaoSetFromOptions_BNCG;
432   tao->ops->destroy = TaoDestroy_BNCG;
433 
434   /* Override default settings (unless already changed) */
435   if (!tao->max_it_changed) tao->max_it = 2000;
436   if (!tao->max_funcs_changed) tao->max_funcs = 4000;
437 
438   /*  Note: nondefault values should be used for nonlinear conjugate gradient  */
439   /*  method.  In particular, gtol should be less that 0.5; the value used in  */
440   /*  Nocedal and Wright is 0.10.  We use the default values for the  */
441   /*  linesearch because it seems to work better. */
442   ierr = TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);CHKERRQ(ierr);
443   ierr = PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);CHKERRQ(ierr);
444   ierr = TaoLineSearchSetType(tao->linesearch, morethuente_type);CHKERRQ(ierr);
445   ierr = TaoLineSearchUseTaoRoutines(tao->linesearch, tao);CHKERRQ(ierr);
446   ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);
447 
448   ierr = PetscNewLog(tao,&cg);CHKERRQ(ierr);
449   tao->data = (void*)cg;
450   cg->rho = 1e-4;
451   cg->pow = 2.1;
452   cg->eta = 0.5;
453   cg->as_step = 0.001;
454   cg->as_tol = 0.001;
455   cg->as_type = CG_AS_BERTSEKAS;
456   cg->cg_type = CG_DaiYuan;
457   cg->recycle = PETSC_FALSE;
458   PetscFunctionReturn(0);
459 }
460