xref: /petsc/src/tao/bound/impls/bnk/bnk.c (revision 65f8aed5f7eaa1e2ef2ddeffe666264e0669c876)
1 #include <petsctaolinesearch.h>
2 #include <../src/tao/bound/impls/bnk/bnk.h>
3 #include <petscksp.h>
4 
/* Human-readable names for the BNK_INIT_*, BNK_UPDATE_*, and BNK_AS_* enum
   values declared in bnk.h.  Presumably consumed by the options processing
   (e.g. PetscOptionsEList) and monitors -- confirm against bnk.c's SetFromOptions.
   NOTE(review): the arrays reserve 64 slots but only the leading entries are
   populated; the remaining pointers are implicitly NULL. */
static const char *BNK_INIT[64] = {"constant", "direction", "interpolation"};
static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"};
static const char *BNK_AS[64] = {"none", "bertsekas"};
8 
9 /*------------------------------------------------------------*/
10 
11 /* Routine for initializing the KSP solver, the BFGS preconditioner, and the initial trust radius estimation */
12 
13 PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH)
14 {
15   PetscErrorCode               ierr;
16   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
17   PC                           pc;
18 
19   PetscReal                    f_min, ftrial, prered, actred, kappa, sigma, resnorm;
20   PetscReal                    tau, tau_1, tau_2, tau_max, tau_min, max_radius;
21   PetscBool                    is_bfgs, is_jacobi, is_symmetric, sym_set;
22   PetscInt                     n, N, nDiff;
23   PetscInt                     i_max = 5;
24   PetscInt                     j_max = 1;
25   PetscInt                     i, j;
26 
27   PetscFunctionBegin;
28   /* Project the current point onto the feasible set */
29   ierr = TaoComputeVariableBounds(tao);CHKERRQ(ierr);
30   ierr = TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU);CHKERRQ(ierr);
31   if (tao->bounded) {
32     ierr = TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);CHKERRQ(ierr);
33   }
34 
35   /* Project the initial point onto the feasible region */
36   ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr);
37 
38   /* Check convergence criteria */
39   ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);CHKERRQ(ierr);
40   ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr);
41   ierr = VecCopy(bnk->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
42   ierr = VecISSet(tao->gradient, bnk->active_idx, 0.0);CHKERRQ(ierr);
43   ierr = TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);CHKERRQ(ierr);
44 
45   /* Test the initial point for convergence */
46   ierr = VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);CHKERRQ(ierr);
47   ierr = VecNorm(bnk->W, NORM_2, &resnorm);CHKERRQ(ierr);
48   if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(resnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
49   ierr = TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
50   ierr = TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);CHKERRQ(ierr);
51   ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
52   if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);
53 
54   /* Reset KSP stopping reason counters */
55   bnk->ksp_atol = 0;
56   bnk->ksp_rtol = 0;
57   bnk->ksp_dtol = 0;
58   bnk->ksp_ctol = 0;
59   bnk->ksp_negc = 0;
60   bnk->ksp_iter = 0;
61   bnk->ksp_othr = 0;
62 
63   /* Reset accepted step type counters */
64   bnk->tot_cg_its = 0;
65   bnk->newt = 0;
66   bnk->bfgs = 0;
67   bnk->sgrad = 0;
68   bnk->grad = 0;
69 
70   /* Initialize the Hessian perturbation */
71   bnk->pert = bnk->sval;
72 
73   /* Reset initial steplength to zero (this helps BNCG reset its direction internally) */
74   ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr);
75 
76   /* Allocate the vectors needed for the BFGS approximation */
77   ierr = KSPGetPC(tao->ksp, &pc);CHKERRQ(ierr);
78   ierr = PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs);CHKERRQ(ierr);
79   ierr = PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi);CHKERRQ(ierr);
80   if (is_bfgs) {
81     bnk->bfgs_pre = pc;
82     ierr = PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M);CHKERRQ(ierr);
83     ierr = VecGetLocalSize(tao->solution, &n);CHKERRQ(ierr);
84     ierr = VecGetSize(tao->solution, &N);CHKERRQ(ierr);
85     ierr = MatSetSizes(bnk->M, n, n, N, N);CHKERRQ(ierr);
86     ierr = MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
87     ierr = MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric);CHKERRQ(ierr);
88     if (!sym_set || !is_symmetric) SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_INCOMP, "LMVM matrix in the LMVM preconditioner must be symmetric.");
89   } else if (is_jacobi) {
90     ierr = PCJacobiSetUseAbs(pc,PETSC_TRUE);CHKERRQ(ierr);
91   }
92 
93   /* Prepare the min/max vectors for safeguarding diagonal scales */
94   ierr = VecSet(bnk->Diag_min, bnk->dmin);CHKERRQ(ierr);
95   ierr = VecSet(bnk->Diag_max, bnk->dmax);CHKERRQ(ierr);
96 
97   /* Initialize trust-region radius.  The initialization is only performed
98      when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */
99   *needH = PETSC_TRUE;
100   if (bnk->is_nash || bnk->is_stcg || bnk->is_gltr) {
101     switch(initType) {
102     case BNK_INIT_CONSTANT:
103       /* Use the initial radius specified */
104       tao->trust = tao->trust0;
105       break;
106 
107     case BNK_INIT_INTERPOLATION:
108       /* Use interpolation based on the initial Hessian */
109       max_radius = 0.0;
110       tao->trust = tao->trust0;
111       for (j = 0; j < j_max; ++j) {
112         f_min = bnk->f;
113         sigma = 0.0;
114 
115         if (*needH) {
116           /* Compute the Hessian at the new step, and extract the inactive subsystem */
117           ierr = bnk->computehessian(tao);CHKERRQ(ierr);
118           ierr = TaoBNKEstimateActiveSet(tao, BNK_AS_NONE);CHKERRQ(ierr);
119           ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr);
120           if (bnk->active_idx) {
121             ierr = MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);CHKERRQ(ierr);
122           } else {
123             ierr = MatDuplicate(tao->hessian, MAT_COPY_VALUES, &bnk->H_inactive);CHKERRQ(ierr);
124           }
125           *needH = PETSC_FALSE;
126         }
127 
128         for (i = 0; i < i_max; ++i) {
129           /* Take a steepest descent step and snap it to bounds */
130           ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr);
131           ierr = VecAXPY(tao->solution, -tao->trust/bnk->gnorm, tao->gradient);CHKERRQ(ierr);
132           ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr);
133           /* Compute the step we actually accepted */
134           ierr = VecCopy(tao->solution, bnk->W);CHKERRQ(ierr);
135           ierr = VecAXPY(bnk->W, -1.0, bnk->Xold);CHKERRQ(ierr);
136           /* Compute the objective at the trial */
137           ierr = TaoComputeObjective(tao, tao->solution, &ftrial);CHKERRQ(ierr);
138           if (PetscIsInfOrNanReal(bnk->f)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
139           ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr);
140           if (PetscIsInfOrNanReal(ftrial)) {
141             tau = bnk->gamma1_i;
142           } else {
143             if (ftrial < f_min) {
144               f_min = ftrial;
145               sigma = -tao->trust / bnk->gnorm;
146             }
147 
148             /* Compute the predicted and actual reduction */
149             if (bnk->active_idx) {
150               ierr = VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
151               ierr = VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr);
152             } else {
153               bnk->X_inactive = bnk->W;
154               bnk->inactive_work = bnk->Xwork;
155             }
156             ierr = MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);CHKERRQ(ierr);
157             ierr = VecDot(bnk->X_inactive, bnk->inactive_work, &prered);CHKERRQ(ierr);
158             if (bnk->active_idx) {
159               ierr = VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
160               ierr = VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr);
161             }
162             prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm));
163             actred = bnk->f - ftrial;
164             if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
165               kappa = 1.0;
166             } else {
167               kappa = actred / prered;
168             }
169 
170             tau_1 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred);
171             tau_2 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred);
172             tau_min = PetscMin(tau_1, tau_2);
173             tau_max = PetscMax(tau_1, tau_2);
174 
175             if (PetscAbsScalar(kappa - 1.0) <= bnk->mu1_i) {
176               /*  Great agreement */
177               max_radius = PetscMax(max_radius, tao->trust);
178 
179               if (tau_max < 1.0) {
180                 tau = bnk->gamma3_i;
181               } else if (tau_max > bnk->gamma4_i) {
182                 tau = bnk->gamma4_i;
183               } else {
184                 tau = tau_max;
185               }
186             } else if (PetscAbsScalar(kappa - 1.0) <= bnk->mu2_i) {
187               /*  Good agreement */
188               max_radius = PetscMax(max_radius, tao->trust);
189 
190               if (tau_max < bnk->gamma2_i) {
191                 tau = bnk->gamma2_i;
192               } else if (tau_max > bnk->gamma3_i) {
193                 tau = bnk->gamma3_i;
194               } else {
195                 tau = tau_max;
196               }
197             } else {
198               /*  Not good agreement */
199               if (tau_min > 1.0) {
200                 tau = bnk->gamma2_i;
201               } else if (tau_max < bnk->gamma1_i) {
202                 tau = bnk->gamma1_i;
203               } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) {
204                 tau = bnk->gamma1_i;
205               } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
206                 tau = tau_1;
207               } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_2 >= 1.0))) {
208                 tau = tau_2;
209               } else {
210                 tau = tau_max;
211               }
212             }
213           }
214           tao->trust = tau * tao->trust;
215         }
216 
217         if (f_min < bnk->f) {
218           /* We accidentally found a solution better than the initial, so accept it */
219           bnk->f = f_min;
220           ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr);
221           ierr = VecAXPY(tao->solution,sigma,tao->gradient);CHKERRQ(ierr);
222           ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr);
223           ierr = VecCopy(tao->solution, tao->stepdirection);CHKERRQ(ierr);
224           ierr = VecAXPY(tao->stepdirection, -1.0, bnk->Xold);CHKERRQ(ierr);
225           ierr = TaoComputeGradient(tao,tao->solution,bnk->unprojected_gradient);CHKERRQ(ierr);
226           ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr);
227           ierr = VecCopy(bnk->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
228           ierr = VecISSet(tao->gradient, bnk->active_idx, 0.0);CHKERRQ(ierr);
229           /* Compute gradient at the new iterate and flip switch to compute the Hessian later */
230           ierr = TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);CHKERRQ(ierr);
231           *needH = PETSC_TRUE;
232           /* Test the new step for convergence */
233           ierr = VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);CHKERRQ(ierr);
234           ierr = VecNorm(bnk->W, NORM_2, &resnorm);CHKERRQ(ierr);
235           if (PetscIsInfOrNanReal(resnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
236           ierr = TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
237           ierr = TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);CHKERRQ(ierr);
238           ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
239           if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);
240           /* active BNCG recycling early because we have a stepdirection computed */
241           ierr = TaoBNCGSetRecycleFlag(bnk->bncg, PETSC_TRUE);CHKERRQ(ierr);
242         }
243       }
244       tao->trust = PetscMax(tao->trust, max_radius);
245 
246       /* Ensure that the trust radius is within the limits */
247       tao->trust = PetscMax(tao->trust, bnk->min_radius);
248       tao->trust = PetscMin(tao->trust, bnk->max_radius);
249       break;
250 
251     default:
252       /* Norm of the first direction will initialize radius */
253       tao->trust = 0.0;
254       break;
255     }
256   }
257   PetscFunctionReturn(0);
258 }
259 
260 /*------------------------------------------------------------*/
261 
262 /* Routine for computing the exact Hessian and preparing the preconditioner at the new iterate */
263 
264 PetscErrorCode TaoBNKComputeHessian(Tao tao)
265 {
266   PetscErrorCode               ierr;
267   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
268 
269   PetscFunctionBegin;
270   /* Compute the Hessian */
271   ierr = TaoComputeHessian(tao,tao->solution,tao->hessian,tao->hessian_pre);CHKERRQ(ierr);
272   /* Add a correction to the BFGS preconditioner */
273   if (bnk->M) {
274     ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
275   }
276   /* Prepare the reduced sub-matrices for the inactive set */
277   if (bnk->Hpre_inactive) {
278     ierr = MatDestroy(&bnk->Hpre_inactive);CHKERRQ(ierr);
279   }
280   if (bnk->H_inactive) {
281     ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr);
282   }
283   if (bnk->active_idx) {
284     ierr = MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);CHKERRQ(ierr);
285     if (tao->hessian == tao->hessian_pre) {
286       ierr = PetscObjectReference((PetscObject)bnk->H_inactive);CHKERRQ(ierr);
287       bnk->Hpre_inactive = bnk->H_inactive;
288     } else {
289       ierr = MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive);CHKERRQ(ierr);
290     }
291     if (bnk->bfgs_pre) {
292       ierr = PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx);CHKERRQ(ierr);
293     }
294   } else {
295     ierr = MatDuplicate(tao->hessian, MAT_COPY_VALUES, &bnk->H_inactive);CHKERRQ(ierr);
296     if (tao->hessian == tao->hessian_pre) {
297       ierr = PetscObjectReference((PetscObject)bnk->H_inactive);CHKERRQ(ierr);
298       bnk->Hpre_inactive = bnk->H_inactive;
299     } else {
300       ierr = MatDuplicate(tao->hessian_pre, MAT_COPY_VALUES, &bnk->Hpre_inactive);CHKERRQ(ierr);
301     }
302     if (bnk->bfgs_pre) {
303       ierr = PCLMVMClearIS(bnk->bfgs_pre);CHKERRQ(ierr);
304     }
305   }
306   PetscFunctionReturn(0);
307 }
308 
309 /*------------------------------------------------------------*/
310 
311 /* Routine for estimating the active set */
312 
313 PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType)
314 {
315   PetscErrorCode               ierr;
316   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
317   PetscBool                    hessComputed, diagExists;
318 
319   PetscFunctionBegin;
320   switch (asType) {
321   case BNK_AS_NONE:
322     ierr = ISDestroy(&bnk->inactive_idx);CHKERRQ(ierr);
323     ierr = VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx);CHKERRQ(ierr);
324     ierr = ISDestroy(&bnk->active_idx);CHKERRQ(ierr);
325     ierr = ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx);CHKERRQ(ierr);
326     break;
327 
328   case BNK_AS_BERTSEKAS:
329     /* Compute the trial step vector with which we will estimate the active set at the next iteration */
330     if (bnk->M) {
331       /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */
332       ierr = MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W);CHKERRQ(ierr);
333     } else {
334       if (tao->hessian) {
335         ierr = MatAssembled(tao->hessian, &hessComputed);CHKERRQ(ierr);
336         ierr = MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists);CHKERRQ(ierr);
337       } else {
338         hessComputed = diagExists = PETSC_FALSE;
339       }
340       if (hessComputed && diagExists) {
341         /* BFGS preconditioner doesn't exist so let's invert the absolute diagonal of the Hessian instead onto the gradient */
342         ierr = MatGetDiagonal(tao->hessian, bnk->Xwork);CHKERRQ(ierr);
343         ierr = VecAbs(bnk->Xwork);CHKERRQ(ierr);
344         ierr = VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork);CHKERRQ(ierr);
345         ierr = VecReciprocal(bnk->Xwork);CHKERRQ(ierr);CHKERRQ(ierr);
346         ierr = VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient);CHKERRQ(ierr);
347       } else {
348         /* If the Hessian or its diagonal does not exist, we will simply use gradient step */
349         ierr = VecCopy(bnk->unprojected_gradient, bnk->W);CHKERRQ(ierr);
350       }
351     }
352     ierr = VecScale(bnk->W, -1.0);CHKERRQ(ierr);
353     ierr = TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol,
354                                    &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx);CHKERRQ(ierr);
355     break;
356 
357   default:
358     break;
359   }
360   PetscFunctionReturn(0);
361 }
362 
363 /*------------------------------------------------------------*/
364 
365 /* Routine for bounding the step direction */
366 
367 PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step)
368 {
369   PetscErrorCode               ierr;
370   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
371 
372   PetscFunctionBegin;
373   switch (asType) {
374   case BNK_AS_NONE:
375     ierr = VecISSet(step, bnk->active_idx, 0.0);CHKERRQ(ierr);
376     break;
377 
378   case BNK_AS_BERTSEKAS:
379     ierr = TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step);CHKERRQ(ierr);
380     break;
381 
382   default:
383     break;
384   }
385   PetscFunctionReturn(0);
386 }
387 
388 /*------------------------------------------------------------*/
389 
390 /* Routine for taking a finite number of BNCG iterations to
391    accelerate Newton convergence.
392 
393    In practice, this approach simply trades off Hessian evaluations
394    for more gradient evaluations.
395 */
396 
397 PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate)
398 {
399   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
400   PetscErrorCode               ierr;
401 
402   PetscFunctionBegin;
403   *terminate = PETSC_FALSE;
404   if (bnk->max_cg_its > 0) {
405     /* Copy the current function value (important vectors are already shared) */
406     bnk->bncg_ctx->f = bnk->f;
407     /* Take some small finite number of BNCG iterations */
408     ierr = TaoSolve(bnk->bncg);CHKERRQ(ierr);
409     /* Add the number of gradient and function evaluations to the total */
410     tao->nfuncs += bnk->bncg->nfuncs;
411     tao->nfuncgrads += bnk->bncg->nfuncgrads;
412     tao->ngrads += bnk->bncg->ngrads;
413     tao->nhess += bnk->bncg->nhess;
414     bnk->tot_cg_its += bnk->bncg->niter;
415     /* Extract the BNCG function value out and save it into BNK */
416     bnk->f = bnk->bncg_ctx->f;
417     if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) {
418       *terminate = PETSC_TRUE;
419     } else {
420       ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr);
421     }
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 /*------------------------------------------------------------*/
427 
428 /* Routine for computing the Newton step. */
429 
430 PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type)
431 {
432   PetscErrorCode               ierr;
433   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
434   PetscInt                     bfgsUpdates = 0;
435   PetscInt                     kspits;
436   PetscBool                    is_lmvm;
437 
438   PetscFunctionBegin;
439   /* If there are no inactive variables left, save some computation and return an adjusted zero step
440      that has (l-x) and (u-x) for lower and upper bounded variables. */
441   if (!bnk->inactive_idx) {
442     ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr);
443     ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr);
444     PetscFunctionReturn(0);
445   }
446 
447   /* Shift the reduced Hessian matrix */
448   if ((shift) && (bnk->pert > 0)) {
449     ierr = PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm);CHKERRQ(ierr);
450     if (is_lmvm) {
451       ierr = MatShift(tao->hessian, bnk->pert);CHKERRQ(ierr);
452     } else {
453       ierr = MatShift(bnk->H_inactive, bnk->pert);CHKERRQ(ierr);
454       if (bnk->H_inactive != bnk->Hpre_inactive) {
455         ierr = MatShift(bnk->Hpre_inactive, bnk->pert);CHKERRQ(ierr);
456       }
457     }
458   }
459 
460   /* Solve the Newton system of equations */
461   tao->ksp_its = 0;
462   ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr);
463   ierr = KSPReset(tao->ksp);CHKERRQ(ierr);
464   ierr = KSPSetOperators(tao->ksp,bnk->H_inactive,bnk->Hpre_inactive);CHKERRQ(ierr);
465   ierr = VecCopy(bnk->unprojected_gradient, bnk->Gwork);CHKERRQ(ierr);
466   if (bnk->active_idx) {
467     ierr = VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr);
468     ierr = VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
469   } else {
470     bnk->G_inactive = bnk->unprojected_gradient;
471     bnk->X_inactive = tao->stepdirection;
472   }
473   if (bnk->is_nash || bnk->is_stcg || bnk->is_gltr) {
474     ierr = KSPCGSetRadius(tao->ksp,tao->trust);CHKERRQ(ierr);
475     ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr);
476     ierr = KSPGetIterationNumber(tao->ksp,&kspits);CHKERRQ(ierr);
477     tao->ksp_its+=kspits;
478     tao->ksp_tot_its+=kspits;
479     ierr = KSPCGGetNormD(tao->ksp,&bnk->dnorm);CHKERRQ(ierr);
480 
481     if (0.0 == tao->trust) {
482       /* Radius was uninitialized; use the norm of the direction */
483       if (bnk->dnorm > 0.0) {
484         tao->trust = bnk->dnorm;
485 
486         /* Modify the radius if it is too large or small */
487         tao->trust = PetscMax(tao->trust, bnk->min_radius);
488         tao->trust = PetscMin(tao->trust, bnk->max_radius);
489       } else {
490         /* The direction was bad; set radius to default value and re-solve
491            the trust-region subproblem to get a direction */
492         tao->trust = tao->trust0;
493 
494         /* Modify the radius if it is too large or small */
495         tao->trust = PetscMax(tao->trust, bnk->min_radius);
496         tao->trust = PetscMin(tao->trust, bnk->max_radius);
497 
498         ierr = KSPCGSetRadius(tao->ksp,tao->trust);CHKERRQ(ierr);
499         ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr);
500         ierr = KSPGetIterationNumber(tao->ksp,&kspits);CHKERRQ(ierr);
501         tao->ksp_its+=kspits;
502         tao->ksp_tot_its+=kspits;
503         ierr = KSPCGGetNormD(tao->ksp,&bnk->dnorm);CHKERRQ(ierr);
504 
505         if (bnk->dnorm == 0.0) SETERRQ(PETSC_COMM_SELF,1, "Initial direction zero");
506       }
507     }
508   } else {
509     ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr);
510     ierr = KSPGetIterationNumber(tao->ksp, &kspits);CHKERRQ(ierr);
511     tao->ksp_its += kspits;
512     tao->ksp_tot_its+=kspits;
513   }
514   /* Restore sub vectors back */
515   if (bnk->active_idx) {
516     ierr = VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr);
517     ierr = VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
518   }
519   /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
520   ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
521   ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr);
522 
523   /* Record convergence reasons */
524   ierr = KSPGetConvergedReason(tao->ksp, ksp_reason);CHKERRQ(ierr);
525   if (KSP_CONVERGED_ATOL == *ksp_reason) {
526     ++bnk->ksp_atol;
527   } else if (KSP_CONVERGED_RTOL == *ksp_reason) {
528     ++bnk->ksp_rtol;
529   } else if (KSP_CONVERGED_CG_CONSTRAINED == *ksp_reason) {
530     ++bnk->ksp_ctol;
531   } else if (KSP_CONVERGED_CG_NEG_CURVE == *ksp_reason) {
532     ++bnk->ksp_negc;
533   } else if (KSP_DIVERGED_DTOL == *ksp_reason) {
534     ++bnk->ksp_dtol;
535   } else if (KSP_DIVERGED_ITS == *ksp_reason) {
536     ++bnk->ksp_iter;
537   } else {
538     ++bnk->ksp_othr;
539   }
540 
541   /* Make sure the BFGS preconditioner is healthy */
542   if (bnk->M) {
543     ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr);
544     if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) {
545       /* Preconditioner is numerically indefinite; reset the approximation. */
546       ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr);
547       ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
548     }
549   }
550   *step_type = BNK_NEWTON;
551   PetscFunctionReturn(0);
552 }
553 
554 /*------------------------------------------------------------*/
555 
556 /* Routine for recomputing the predicted reduction for a given step vector */
557 
558 PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered)
559 {
560   PetscErrorCode               ierr;
561   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
562 
563   PetscFunctionBegin;
564   /* Extract subvectors associated with the inactive set */
565   if (bnk->active_idx){
566     ierr = VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
567     ierr = VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr);
568     ierr = VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr);
569   } else {
570     bnk->X_inactive = tao->stepdirection;
571     bnk->inactive_work = bnk->Xwork;
572     bnk->G_inactive = bnk->Gwork;
573   }
574   /* Recompute the predicted decrease based on the quadratic model */
575   ierr = MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);CHKERRQ(ierr);
576   ierr = VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive);CHKERRQ(ierr);
577   ierr = VecDot(bnk->inactive_work, bnk->X_inactive, prered);CHKERRQ(ierr);
578   /* Restore the sub vectors */
579   if (bnk->active_idx){
580     ierr = VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr);
581     ierr = VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr);
582     ierr = VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr);
583   }
584   PetscFunctionReturn(0);
585 }
586 
587 /*------------------------------------------------------------*/
588 
589 /* Routine for ensuring that the Newton step is a descent direction.
590 
591    The step direction falls back onto BFGS, scaled gradient and gradient steps
592    in the event that the Newton step fails the test.
593 */
594 
/*
  TaoBNKSafeguardStep - Verifies the computed step is a descent direction and,
  when it is not, falls back to progressively safer directions (BFGS, then
  scaled gradient, then gradient), updating the Hessian perturbation used by
  the next TaoBNKComputeStep along the way.

  Input Parameters:
+ tao        - the Tao solver context (data is a TAO_BNK)
- ksp_reason - convergence reason from the Newton KSP solve

  Input/Output Parameter:
. stepType - on entry, the type of step just computed (BNK_NEWTON, BNK_BFGS,
             BNK_SCALED_GRADIENT); on exit, the type actually kept after
             safeguarding
*/
PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType)
{
  PetscErrorCode               ierr;
  TAO_BNK                      *bnk = (TAO_BNK *)tao->data;

  PetscReal                    gdx, e_min;
  PetscInt                     bfgsUpdates;

  PetscFunctionBegin;
  switch (*stepType) {
  case BNK_NEWTON:
    /* Descent test: a valid descent direction has g.d < 0 */
    ierr = VecDot(tao->stepdirection, tao->gradient, &gdx);CHKERRQ(ierr);
    if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) {
      /* Newton step is not descent or direction produced Inf or NaN
        Update the perturbation for next time */
      if (bnk->pert <= 0.0) {
        /* Initialize the perturbation */
        bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
        if (bnk->is_gltr) {
          /* GLTR exposes the smallest eigenvalue; make the shift at least
             large enough to push it nonnegative */
          ierr = KSPCGGLTRGetMinEig(tao->ksp,&e_min);CHKERRQ(ierr);
          bnk->pert = PetscMax(bnk->pert, -e_min);
        }
      } else {
        /* Increase the perturbation */
        bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
      }

      if (!bnk->M) {
        /* We don't have the bfgs matrix around and updated
          Must use gradient direction in this case */
        ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
        *stepType = BNK_GRADIENT;
      } else {
        /* Attempt to use the BFGS direction */
        ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);

        /* Check for success (descent direction)
          NOTE: Negative gdx here means not a descent direction because
          the fall-back step is missing a negative sign. */
        ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr);
        if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
          /* BFGS direction is not descent or direction produced not a number
            We can assert bfgsUpdates > 1 in this case because
            the first solve produces the scaled gradient direction,
            which is guaranteed to be descent */

          /* Use steepest descent direction (scaled) */
          ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr);
          ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
          ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);

          *stepType = BNK_SCALED_GRADIENT;
        } else {
          ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr);
          if (1 == bfgsUpdates) {
            /* The first BFGS direction is always the scaled gradient */
            *stepType = BNK_SCALED_GRADIENT;
          } else {
            *stepType = BNK_BFGS;
          }
        }
      }
      /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
      ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
      ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr);
    } else {
      /* Computed Newton step is descent */
      switch (ksp_reason) {
      case KSP_DIVERGED_NANORINF:
      case KSP_DIVERGED_BREAKDOWN:
      case KSP_DIVERGED_INDEFINITE_MAT:
      case KSP_DIVERGED_INDEFINITE_PC:
      case KSP_CONVERGED_CG_NEG_CURVE:
        /* Matrix or preconditioner is indefinite; increase perturbation */
        if (bnk->pert <= 0.0) {
          /* Initialize the perturbation */
          bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
          if (bnk->is_gltr) {
            ierr = KSPCGGLTRGetMinEig(tao->ksp, &e_min);CHKERRQ(ierr);
            bnk->pert = PetscMax(bnk->pert, -e_min);
          }
        } else {
          /* Increase the perturbation */
          bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
        }
        break;

      default:
        /* Newton step computation is good; decrease perturbation */
        bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm);
        if (bnk->pert < bnk->pmin) {
          bnk->pert = 0.0;
        }
        break;
      }
      *stepType = BNK_NEWTON;
    }
    break;

  case BNK_BFGS:
    /* Check for success (descent direction) */
    ierr = VecDot(tao->stepdirection, tao->gradient, &gdx);CHKERRQ(ierr);
    if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) {
      /* Step is not descent or solve was not successful
         Use steepest descent direction (scaled) */
      ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr);
      ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
      ierr = MatSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr);
      ierr = VecScale(tao->stepdirection,-1.0);CHKERRQ(ierr);
      ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr);
      *stepType = BNK_SCALED_GRADIENT;
    } else {
      *stepType = BNK_BFGS;
    }
    break;

  case BNK_SCALED_GRADIENT:
    /* Freshly reset scaled gradient is descent by construction; nothing to do */
    break;

  default:
    break;
  }

  PetscFunctionReturn(0);
}
720 
721 /*------------------------------------------------------------*/
722 
723 /* Routine for performing a bound-projected More-Thuente line search.
724 
725   Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the
726   Newton step does not produce a valid step length.
727 */
728 
/* Line-search driver: applies the projected line search along tao->stepdirection
   and, on failure, reverts the iterate and retries with progressively safer
   fall-back directions (Newton -> BFGS -> scaled gradient / gradient) until the
   search succeeds or no safeguarded fall-back remains.

   In/out:
     stepType - on entry, the type of step stored in tao->stepdirection;
                on exit, the type of step that was actually searched along
     steplen  - accepted line-search step length
     reason   - final line-search termination reason */
PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason)
{
  TAO_BNK        *bnk = (TAO_BNK *)tao->data;
  PetscErrorCode ierr;
  TaoLineSearchConvergedReason ls_reason;

  PetscReal      e_min, gdx;
  PetscInt       bfgsUpdates;

  PetscFunctionBegin;
  /* Perform the linesearch */
  ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);CHKERRQ(ierr);
  ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

  /* Scaled-gradient and plain gradient steps have no further fall-back, so the
     retry loop only runs for failed Newton and BFGS steps */
  while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) {
    /* Linesearch failed, revert solution */
    bnk->f = bnk->fold;
    ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr);
    ierr = VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);CHKERRQ(ierr);

    switch(*stepType) {
    case BNK_NEWTON:
      /* Failed to obtain acceptable iterate with Newton step
         Update the perturbation for next time */
      if (bnk->pert <= 0.0) {
        /* Initialize the perturbation */
        bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
        if (bnk->is_gltr) {
          /* GLTR exposes a minimum-eigenvalue estimate; ensure the perturbation
             is large enough to shift it non-negative */
          ierr = KSPCGGLTRGetMinEig(tao->ksp,&e_min);CHKERRQ(ierr);
          bnk->pert = PetscMax(bnk->pert, -e_min);
        }
      } else {
        /* Increase the perturbation */
        bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
      }

      if (!bnk->M) {
        /* We don't have the bfgs matrix around and being updated
           Must use gradient direction in this case */
        ierr = VecCopy(bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);
        *stepType = BNK_GRADIENT;
      } else {
        /* Attempt to use the BFGS direction */
        ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);
        /* Check for success (descent direction)
           NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */
        ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr);
        if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) {
          /* BFGS direction is not descent or direction produced not a number
             We can assert bfgsUpdates > 1 in this case
             Use steepest descent direction (scaled) */
          ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr);
          ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
          ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);

          bfgsUpdates = 1;
          *stepType = BNK_SCALED_GRADIENT;
        } else {
          ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr);
          if (1 == bfgsUpdates) {
            /* The first BFGS direction is always the scaled gradient */
            *stepType = BNK_SCALED_GRADIENT;
          } else {
            *stepType = BNK_BFGS;
          }
        }
      }
      break;

    case BNK_BFGS:
      /* Can only enter if pc_type == BNK_PC_BFGS
         Failed to obtain acceptable iterate with BFGS step
         Attempt to use the scaled gradient direction */
      ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr);
      ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
      ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr);

      bfgsUpdates = 1;
      *stepType = BNK_SCALED_GRADIENT;
      break;
    }
    /* Make sure the safeguarded fall-back step is zero for actively bounded variables */
    ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
    ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr);

    /* Perform one last line search with the fall-back step */
    ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);CHKERRQ(ierr);
    ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);
  }
  *reason = ls_reason;
  PetscFunctionReturn(0);
}
821 
822 /*------------------------------------------------------------*/
823 
824 /* Routine for updating the trust radius.
825 
826   Function features three different update methods:
827   1) Line-search step length based
828   2) Predicted decrease on the CG quadratic model
829   3) Interpolation
830 */
831 
832 PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept)
833 {
834   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
835   PetscErrorCode ierr;
836 
837   PetscReal      step, kappa;
838   PetscReal      gdx, tau_1, tau_2, tau_min, tau_max;
839 
840   PetscFunctionBegin;
841   /* Update trust region radius */
842   *accept = PETSC_FALSE;
843   switch(updateType) {
844   case BNK_UPDATE_STEP:
845     *accept = PETSC_TRUE; /* always accept here because line search succeeded */
846     if (stepType == BNK_NEWTON) {
847       ierr = TaoLineSearchGetStepLength(tao->linesearch, &step);CHKERRQ(ierr);
848       if (step < bnk->nu1) {
849         /* Very bad step taken; reduce radius */
850         tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
851       } else if (step < bnk->nu2) {
852         /* Reasonably bad step taken; reduce radius */
853         tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust);
854       } else if (step < bnk->nu3) {
855         /*  Reasonable step was taken; leave radius alone */
856         if (bnk->omega3 < 1.0) {
857           tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust);
858         } else if (bnk->omega3 > 1.0) {
859           tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust);
860         }
861       } else if (step < bnk->nu4) {
862         /*  Full step taken; increase the radius */
863         tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust);
864       } else {
865         /*  More than full step taken; increase the radius */
866         tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust);
867       }
868     } else {
869       /*  Newton step was not good; reduce the radius */
870       tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust);
871     }
872     break;
873 
874   case BNK_UPDATE_REDUCTION:
875     if (stepType == BNK_NEWTON) {
876       if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) {
877         /* The predicted reduction has the wrong sign.  This cannot
878            happen in infinite precision arithmetic.  Step should
879            be rejected! */
880         tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
881       } else {
882         if (PetscIsInfOrNanReal(actred)) {
883           tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
884         } else {
885           if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon)) {
886             kappa = 1.0;
887           } else {
888             kappa = actred / prered;
889           }
890           /* Accept or reject the step and update radius */
891           if (kappa < bnk->eta1) {
892             /* Reject the step */
893             tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm);
894           } else {
895             /* Accept the step */
896             *accept = PETSC_TRUE;
897             /* Update the trust region radius only if the computed step is at the trust radius boundary */
898             if (bnk->dnorm == tao->trust) {
899               if (kappa < bnk->eta2) {
900                 /* Marginal bad step */
901                 tao->trust = bnk->alpha2 * tao->trust;
902               } else if (kappa < bnk->eta3) {
903                 /* Reasonable step */
904                 tao->trust = bnk->alpha3 * tao->trust;
905               } else if (kappa < bnk->eta4) {
906                 /* Good step */
907                 tao->trust = bnk->alpha4 * tao->trust;
908               } else {
909                 /* Very good step */
910                 tao->trust = bnk->alpha5 * tao->trust;
911               }
912             }
913           }
914         }
915       }
916     } else {
917       /*  Newton step was not good; reduce the radius */
918       tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust);
919     }
920     break;
921 
922   default:
923     if (stepType == BNK_NEWTON) {
924       if (prered < 0.0) {
925         /*  The predicted reduction has the wrong sign.  This cannot */
926         /*  happen in infinite precision arithmetic.  Step should */
927         /*  be rejected! */
928         tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
929       } else {
930         if (PetscIsInfOrNanReal(actred)) {
931           tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
932         } else {
933           if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) {
934             kappa = 1.0;
935           } else {
936             kappa = actred / prered;
937           }
938 
939           ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr);
940           tau_1 = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred);
941           tau_2 = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred);
942           tau_min = PetscMin(tau_1, tau_2);
943           tau_max = PetscMax(tau_1, tau_2);
944 
945           if (kappa >= 1.0 - bnk->mu1) {
946             /*  Great agreement */
947             *accept = PETSC_TRUE;
948             if (tau_max < 1.0) {
949               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
950             } else if (tau_max > bnk->gamma4) {
951               tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm);
952             } else {
953               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
954             }
955           } else if (kappa >= 1.0 - bnk->mu2) {
956             /*  Good agreement */
957             *accept = PETSC_TRUE;
958             if (tau_max < bnk->gamma2) {
959               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
960             } else if (tau_max > bnk->gamma3) {
961               tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm);
962             } else if (tau_max < 1.0) {
963               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
964             } else {
965               tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm);
966             }
967           } else {
968             /*  Not good agreement */
969             if (tau_min > 1.0) {
970               tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm);
971             } else if (tau_max < bnk->gamma1) {
972               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
973             } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) {
974               tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm);
975             } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) {
976               tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm);
977             } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_2 >= 1.0))) {
978               tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm);
979             } else {
980               tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm);
981             }
982           }
983         }
984       }
985     } else {
986       /*  Newton step was not good; reduce the radius */
987       tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust);
988     }
989     break;
990   }
991   /* Make sure the radius does not violate min and max settings */
992   tao->trust = PetscMin(tao->trust, bnk->max_radius);
993   tao->trust = PetscMax(tao->trust, bnk->min_radius);
994   PetscFunctionReturn(0);
995 }
996 
997 /* ---------------------------------------------------------- */
998 
999 PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType)
1000 {
1001   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
1002 
1003   PetscFunctionBegin;
1004   switch (stepType) {
1005   case BNK_NEWTON:
1006     ++bnk->newt;
1007     break;
1008   case BNK_BFGS:
1009     ++bnk->bfgs;
1010     break;
1011   case BNK_SCALED_GRADIENT:
1012     ++bnk->sgrad;
1013     break;
1014   case BNK_GRADIENT:
1015     ++bnk->grad;
1016     break;
1017   default:
1018     break;
1019   }
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 /* ---------------------------------------------------------- */
1024 
1025 PetscErrorCode TaoSetUp_BNK(Tao tao)
1026 {
1027   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
1028   PetscErrorCode ierr;
1029   PetscInt       i;
1030 
1031   PetscFunctionBegin;
1032   if (!tao->gradient) {
1033     ierr = VecDuplicate(tao->solution,&tao->gradient);CHKERRQ(ierr);
1034   }
1035   if (!tao->stepdirection) {
1036     ierr = VecDuplicate(tao->solution,&tao->stepdirection);CHKERRQ(ierr);
1037   }
1038   if (!bnk->W) {
1039     ierr = VecDuplicate(tao->solution,&bnk->W);CHKERRQ(ierr);
1040   }
1041   if (!bnk->Xold) {
1042     ierr = VecDuplicate(tao->solution,&bnk->Xold);CHKERRQ(ierr);
1043   }
1044   if (!bnk->Gold) {
1045     ierr = VecDuplicate(tao->solution,&bnk->Gold);CHKERRQ(ierr);
1046   }
1047   if (!bnk->Xwork) {
1048     ierr = VecDuplicate(tao->solution,&bnk->Xwork);CHKERRQ(ierr);
1049   }
1050   if (!bnk->Gwork) {
1051     ierr = VecDuplicate(tao->solution,&bnk->Gwork);CHKERRQ(ierr);
1052   }
1053   if (!bnk->unprojected_gradient) {
1054     ierr = VecDuplicate(tao->solution,&bnk->unprojected_gradient);CHKERRQ(ierr);
1055   }
1056   if (!bnk->unprojected_gradient_old) {
1057     ierr = VecDuplicate(tao->solution,&bnk->unprojected_gradient_old);CHKERRQ(ierr);
1058   }
1059   if (!bnk->Diag_min) {
1060     ierr = VecDuplicate(tao->solution,&bnk->Diag_min);CHKERRQ(ierr);
1061   }
1062   if (!bnk->Diag_max) {
1063     ierr = VecDuplicate(tao->solution,&bnk->Diag_max);CHKERRQ(ierr);
1064   }
1065   if (bnk->max_cg_its > 0) {
1066     /* Ensure that the important common vectors are shared between BNK and embedded BNCG */
1067     bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data;
1068     ierr = PetscObjectReference((PetscObject)(bnk->unprojected_gradient_old));CHKERRQ(ierr);
1069     ierr = VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old);CHKERRQ(ierr);
1070     bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old;
1071     ierr = PetscObjectReference((PetscObject)(bnk->unprojected_gradient));CHKERRQ(ierr);
1072     ierr = VecDestroy(&bnk->bncg_ctx->unprojected_gradient);CHKERRQ(ierr);
1073     bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient;
1074     ierr = PetscObjectReference((PetscObject)(bnk->Gold));CHKERRQ(ierr);
1075     ierr = VecDestroy(&bnk->bncg_ctx->G_old);CHKERRQ(ierr);
1076     bnk->bncg_ctx->G_old = bnk->Gold;
1077     ierr = PetscObjectReference((PetscObject)(tao->gradient));CHKERRQ(ierr);
1078     ierr = VecDestroy(&bnk->bncg->gradient);CHKERRQ(ierr);
1079     bnk->bncg->gradient = tao->gradient;
1080     ierr = PetscObjectReference((PetscObject)(tao->stepdirection));CHKERRQ(ierr);
1081     ierr = VecDestroy(&bnk->bncg->stepdirection);CHKERRQ(ierr);
1082     bnk->bncg->stepdirection = tao->stepdirection;
1083     ierr = TaoSetInitialVector(bnk->bncg, tao->solution);CHKERRQ(ierr);
1084     /* Copy over some settings from BNK into BNCG */
1085     ierr = TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its);CHKERRQ(ierr);
1086     ierr = TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol);CHKERRQ(ierr);
1087     ierr = TaoSetFunctionLowerBound(bnk->bncg, tao->fmin);CHKERRQ(ierr);
1088     ierr = TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP);CHKERRQ(ierr);
1089     ierr = TaoSetObjectiveRoutine(bnk->bncg, tao->ops->computeobjective, tao->user_objP);CHKERRQ(ierr);
1090     ierr = TaoSetGradientRoutine(bnk->bncg, tao->ops->computegradient, tao->user_gradP);CHKERRQ(ierr);
1091     ierr = TaoSetObjectiveAndGradientRoutine(bnk->bncg, tao->ops->computeobjectiveandgradient, tao->user_objgradP);CHKERRQ(ierr);
1092     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)(bnk->bncg));CHKERRQ(ierr);
1093     for (i=0; i<tao->numbermonitors; ++i) {
1094       ierr = TaoSetMonitor(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]);CHKERRQ(ierr);
1095       ierr = PetscObjectReference((PetscObject)(tao->monitorcontext[i]));CHKERRQ(ierr);
1096     }
1097   }
1098   bnk->X_inactive = 0;
1099   bnk->G_inactive = 0;
1100   bnk->inactive_work = 0;
1101   bnk->active_work = 0;
1102   bnk->inactive_idx = 0;
1103   bnk->active_idx = 0;
1104   bnk->active_lower = 0;
1105   bnk->active_upper = 0;
1106   bnk->active_fixed = 0;
1107   bnk->M = 0;
1108   bnk->H_inactive = 0;
1109   bnk->Hpre_inactive = 0;
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 /*------------------------------------------------------------*/
1114 
1115 PetscErrorCode TaoDestroy_BNK(Tao tao)
1116 {
1117   TAO_BNK        *bnk = (TAO_BNK *)tao->data;
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   if (tao->setupcalled) {
1122     ierr = VecDestroy(&bnk->W);CHKERRQ(ierr);
1123     ierr = VecDestroy(&bnk->Xold);CHKERRQ(ierr);
1124     ierr = VecDestroy(&bnk->Gold);CHKERRQ(ierr);
1125     ierr = VecDestroy(&bnk->Xwork);CHKERRQ(ierr);
1126     ierr = VecDestroy(&bnk->Gwork);CHKERRQ(ierr);
1127     ierr = VecDestroy(&bnk->unprojected_gradient);CHKERRQ(ierr);
1128     ierr = VecDestroy(&bnk->unprojected_gradient_old);CHKERRQ(ierr);
1129     ierr = VecDestroy(&bnk->Diag_min);CHKERRQ(ierr);
1130     ierr = VecDestroy(&bnk->Diag_max);CHKERRQ(ierr);
1131   }
1132   ierr = ISDestroy(&bnk->active_lower);CHKERRQ(ierr);
1133   ierr = ISDestroy(&bnk->active_upper);CHKERRQ(ierr);
1134   ierr = ISDestroy(&bnk->active_fixed);CHKERRQ(ierr);
1135   ierr = ISDestroy(&bnk->active_idx);CHKERRQ(ierr);
1136   ierr = ISDestroy(&bnk->inactive_idx);CHKERRQ(ierr);
1137   ierr = MatDestroy(&bnk->Hpre_inactive);CHKERRQ(ierr);
1138   ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr);
1139   ierr = TaoDestroy(&bnk->bncg);CHKERRQ(ierr);
1140   ierr = PetscFree(tao->data);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 /*------------------------------------------------------------*/
1145 
/* Registers and reads all BNK runtime options, then propagates option handling
   to the embedded BNCG solver, the line search, and the KSP. Also records
   which trust-region-capable CG variant (NASH/STCG/GLTR) the KSP uses, since
   parts of the solver branch on these flags (e.g. KSPCGGLTRGetMinEig). */
PetscErrorCode TaoSetFromOptions_BNK(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_BNK        *bnk = (TAO_BNK *)tao->data;
  PetscErrorCode ierr;
  KSPType        ksp_type;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"Newton-Krylov method for bound constrained optimization");CHKERRQ(ierr);
  /* Algorithm variant selectors (enumerated lists) */
  ierr = PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, 0);CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, 0);CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, 0);CHKERRQ(ierr);
  /* Hessian perturbation parameters */
  ierr = PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial Hessian perturbation", "", bnk->imax, &bnk->imax,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac,NULL);CHKERRQ(ierr);
  /* Trust-radius update parameters for -tao_bnk_update_type reduction */
  ierr = PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, &bnk->alpha1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5,NULL);CHKERRQ(ierr);
  /* Trust-radius update parameters for -tao_bnk_update_type step */
  ierr = PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_omega3", "(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5,NULL);CHKERRQ(ierr);
  /* Radius initialization parameters for -tao_bnk_init_type interpolation */
  ierr = PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i,NULL);CHKERRQ(ierr);
  /* Trust-radius update parameters for -tao_bnk_update_type interpolation */
  ierr = PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta,NULL);CHKERRQ(ierr);
  /* Global radius limits and miscellaneous tolerances */
  ierr = PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when estimating actively bounded variables", "", bnk->as_step, &bnk->as_step,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  /* Propagate option handling to the sub-solvers */
  ierr = TaoSetFromOptions(bnk->bncg);CHKERRQ(ierr);
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr);
  /* Cache which trust-region CG variant is in use for later branching */
  ierr = KSPGetType(tao->ksp,&ksp_type);CHKERRQ(ierr);
  ierr = PetscStrcmp(ksp_type,KSPCGNASH,&bnk->is_nash);CHKERRQ(ierr);
  ierr = PetscStrcmp(ksp_type,KSPCGSTCG,&bnk->is_stcg);CHKERRQ(ierr);
  ierr = PetscStrcmp(ksp_type,KSPCGGLTR,&bnk->is_gltr);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1215 
1216 /*------------------------------------------------------------*/
1217 
/* Prints BNK solver statistics (step-type counts and KSP termination tallies)
   to an ASCII viewer; silently does nothing for other viewer types. */
PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer)
{
  TAO_BNK        *bnk = (TAO_BNK *)tao->data;
  PetscInt       nrejects;
  PetscBool      isascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    /* BFGS statistics are only meaningful if the LMVM matrix exists */
    if (bnk->M) {
      ierr = MatLMVMGetRejectCount(bnk->M,&nrejects);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %D\n",nrejects);CHKERRQ(ierr);
    }
    /* Counts of how each accepted step was produced */
    ierr = PetscViewerASCIIPrintf(viewer, "CG steps: %D\n", bnk->tot_cg_its);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Newton steps: %D\n", bnk->newt);CHKERRQ(ierr);
    if (bnk->M) {
      ierr = PetscViewerASCIIPrintf(viewer, "BFGS steps: %D\n", bnk->bfgs);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %D\n", bnk->sgrad);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Gradient steps: %D\n", bnk->grad);CHKERRQ(ierr);
    /* Tallies of why the KSP solves terminated */
    ierr = PetscViewerASCIIPrintf(viewer, "KSP termination reasons:\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  atol: %D\n", bnk->ksp_atol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  rtol: %D\n", bnk->ksp_rtol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  ctol: %D\n", bnk->ksp_ctol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  negc: %D\n", bnk->ksp_negc);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  dtol: %D\n", bnk->ksp_dtol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  iter: %D\n", bnk->ksp_iter);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  othr: %D\n", bnk->ksp_othr);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1252 
1253 /* ---------------------------------------------------------- */
1254 
1255 /*MC
1256   TAOBNK - Shared base-type for Bounded Newton-Krylov type algorithms.
1257   At each iteration, the BNK methods solve the symmetric
1258   system of equations to obtain the step direction dk:
1259               Hk dk = -gk
1260   for free variables only. The step can be globalized either through
1261   trust-region methods, or a line search, or a heuristic mixture of both.
1262 
1263     Options Database Keys:
1264 + -max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
1265 . -init_type - trust radius initialization method ("constant", "direction", "interpolation")
1266 . -update_type - trust radius update method ("step", "reduction", "interpolation")
1267 . -as_type - active-set estimation method ("none", "bertsekas")
1268 . -as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas)
1269 . -as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas)
1270 . -sval - (developer) Hessian perturbation starting value
1271 . -imin - (developer) minimum initial Hessian perturbation
1272 . -imax - (developer) maximum initial Hessian perturbation
1273 . -pmin - (developer) minimum Hessian perturbation
1274 . -pmax - (developer) maximum Hessian perturbation
1275 . -pgfac - (developer) Hessian perturbation growth factor
1276 . -psfac - (developer) Hessian perturbation shrink factor
1277 . -imfac - (developer) initial merit factor for Hessian perturbation
1278 . -pmgfac - (developer) merit growth factor for Hessian perturbation
1279 . -pmsfac - (developer) merit shrink factor for Hessian perturbation
1280 . -eta1 - (developer) threshold for rejecting step (-update_type reduction)
1281 . -eta2 - (developer) threshold for accepting marginal step (-update_type reduction)
1282 . -eta3 - (developer) threshold for accepting reasonable step (-update_type reduction)
1283 . -eta4 - (developer) threshold for accepting good step (-update_type reduction)
1284 . -alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction)
1285 . -alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction)
1286 . -alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction)
1287 . -alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction)
1288 . -alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction)
1289 . -epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction)
1290 . -mu1 - (developer) threshold for accepting very good step (-update_type interpolation)
1291 . -mu2 - (developer) threshold for accepting good step (-update_type interpolation)
1292 . -gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation)
1293 . -gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation)
1294 . -gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation)
1295 . -gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation)
1296 . -theta - (developer) trust region interpolation factor (-update_type interpolation)
1297 . -nu1 - (developer) threshold for small line-search step length (-update_type step)
1298 . -nu2 - (developer) threshold for reasonable line-search step length (-update_type step)
1299 . -nu3 - (developer) threshold for large line-search step length (-update_type step)
1300 . -nu4 - (developer) threshold for very large line-search step length (-update_type step)
1301 . -omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step)
1302 . -omega2 - (developer) radius reduction factor for small line-search step length (-update_type step)
1303 . -omega3 - (developer) radius factor for decent line-search step length (-update_type step)
1304 . -omega4 - (developer) radius increase factor for large line-search step length (-update_type step)
1305 . -omega5 - (developer) radius increase factor for very large line-search step length (-update_type step)
1306 . -mu1_i -  (developer) threshold for accepting very good step (-init_type interpolation)
1307 . -mu2_i -  (developer) threshold for accepting good step (-init_type interpolation)
1308 . -gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation)
1309 . -gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation)
1310 . -gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation)
1311 . -gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
1312 - -theta_i - (developer) trust region interpolation factor (-init_type interpolation)
1313 
1314   Level: beginner
1315 M*/
1316 
1317 PetscErrorCode TaoCreate_BNK(Tao tao)
1318 {
1319   TAO_BNK        *bnk;
1320   const char     *morethuente_type = TAOLINESEARCHMT;
1321   PetscErrorCode ierr;
1322   PC             pc;
1323 
1324   PetscFunctionBegin;
1325   ierr = PetscNewLog(tao,&bnk);CHKERRQ(ierr);
1326 
1327   tao->ops->setup = TaoSetUp_BNK;
1328   tao->ops->view = TaoView_BNK;
1329   tao->ops->setfromoptions = TaoSetFromOptions_BNK;
1330   tao->ops->destroy = TaoDestroy_BNK;
1331 
1332   /*  Override default settings (unless already changed) */
1333   if (!tao->max_it_changed) tao->max_it = 50;
1334   if (!tao->trust0_changed) tao->trust0 = 100.0;
1335 
1336   tao->data = (void*)bnk;
1337 
1338   /*  Hessian shifting parameters */
1339   bnk->computehessian = TaoBNKComputeHessian;
1340   bnk->computestep = TaoBNKComputeStep;
1341 
1342   bnk->sval   = 0.0;
1343   bnk->imin   = 1.0e-4;
1344   bnk->imax   = 1.0e+2;
1345   bnk->imfac  = 1.0e-1;
1346 
1347   bnk->pmin   = 1.0e-12;
1348   bnk->pmax   = 1.0e+2;
1349   bnk->pgfac  = 1.0e+1;
1350   bnk->psfac  = 4.0e-1;
1351   bnk->pmgfac = 1.0e-1;
1352   bnk->pmsfac = 1.0e-1;
1353 
1354   /*  Default values for trust-region radius update based on steplength */
1355   bnk->nu1 = 0.25;
1356   bnk->nu2 = 0.50;
1357   bnk->nu3 = 1.00;
1358   bnk->nu4 = 1.25;
1359 
1360   bnk->omega1 = 0.25;
1361   bnk->omega2 = 0.50;
1362   bnk->omega3 = 1.00;
1363   bnk->omega4 = 2.00;
1364   bnk->omega5 = 4.00;
1365 
1366   /*  Default values for trust-region radius update based on reduction */
1367   bnk->eta1 = 1.0e-4;
1368   bnk->eta2 = 0.25;
1369   bnk->eta3 = 0.50;
1370   bnk->eta4 = 0.90;
1371 
1372   bnk->alpha1 = 0.25;
1373   bnk->alpha2 = 0.50;
1374   bnk->alpha3 = 1.00;
1375   bnk->alpha4 = 2.00;
1376   bnk->alpha5 = 4.00;
1377 
1378   /*  Default values for trust-region radius update based on interpolation */
1379   bnk->mu1 = 0.10;
1380   bnk->mu2 = 0.50;
1381 
1382   bnk->gamma1 = 0.25;
1383   bnk->gamma2 = 0.50;
1384   bnk->gamma3 = 2.00;
1385   bnk->gamma4 = 4.00;
1386 
1387   bnk->theta = 0.05;
1388 
1389   /*  Default values for trust region initialization based on interpolation */
1390   bnk->mu1_i = 0.35;
1391   bnk->mu2_i = 0.50;
1392 
1393   bnk->gamma1_i = 0.0625;
1394   bnk->gamma2_i = 0.5;
1395   bnk->gamma3_i = 2.0;
1396   bnk->gamma4_i = 5.0;
1397 
1398   bnk->theta_i = 0.25;
1399 
1400   /*  Remaining parameters */
1401   bnk->max_cg_its = 0;
1402   bnk->min_radius = 1.0e-10;
1403   bnk->max_radius = 1.0e10;
1404   bnk->epsilon = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0/3.0);
1405   bnk->as_tol = 1.0e-3;
1406   bnk->as_step = 1.0e-3;
1407   bnk->dmin = 1.0e-6;
1408   bnk->dmax = 1.0e6;
1409 
1410   bnk->M               = 0;
1411   bnk->bfgs_pre        = 0;
1412   bnk->init_type       = BNK_INIT_INTERPOLATION;
1413   bnk->update_type     = BNK_UPDATE_REDUCTION;
1414   bnk->as_type         = BNK_AS_BERTSEKAS;
1415 
1416   /* Create the embedded BNCG solver */
1417   ierr = TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg);CHKERRQ(ierr);
1418   ierr = PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1);CHKERRQ(ierr);
1419   ierr = TaoSetOptionsPrefix(bnk->bncg, "tao_bnk_");CHKERRQ(ierr);
1420   ierr = TaoSetType(bnk->bncg, TAOBNCG);CHKERRQ(ierr);
1421 
1422   /* Create the line search */
1423   ierr = TaoLineSearchCreate(((PetscObject)tao)->comm,&tao->linesearch);CHKERRQ(ierr);
1424   ierr = PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);CHKERRQ(ierr);
1425   ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);
1426   ierr = TaoLineSearchSetType(tao->linesearch,morethuente_type);CHKERRQ(ierr);
1427   ierr = TaoLineSearchUseTaoRoutines(tao->linesearch,tao);CHKERRQ(ierr);
1428 
1429   /*  Set linear solver to default for symmetric matrices */
1430   ierr = KSPCreate(((PetscObject)tao)->comm,&tao->ksp);CHKERRQ(ierr);
1431   ierr = PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);CHKERRQ(ierr);
1432   ierr = KSPSetOptionsPrefix(tao->ksp,"tao_bnk_");CHKERRQ(ierr);
1433   ierr = KSPSetType(tao->ksp,KSPCGSTCG);CHKERRQ(ierr);
1434   ierr = KSPGetPC(tao->ksp, &pc);CHKERRQ(ierr);
1435   ierr = PCSetType(pc, PCLMVM);CHKERRQ(ierr);
1436   PetscFunctionReturn(0);
1437 }
1438