#include <../src/tao/bound/impls/bnk/bnk.h>
#include <petscksp.h>

/*
 Implements Newton's Method with a line search approach for
 solving bound constrained minimization problems.

 x_0 = VecMedian(x_0)
 f_0, g_0 = TaoComputeObjectiveAndGradient(x_0)
 pg_0 = project(g_0)
 check convergence at pg_0
 needH = TaoBNKInitialize(default:BNK_INIT_DIRECTION)
 niter = 0
 step_accepted = true

 while niter < max_it
    if needH
      if max_cg_steps > 0
        x_k, g_k, pg_k = TaoSolve(BNCG)
      end

      H_k = TaoComputeHessian(x_k)
      if pc_type == BNK_PC_BFGS
        add correction to BFGS approx
        if scale_type == BNK_SCALE_AHESS
          D = VecMedian(1e-6, abs(diag(H_k)), 1e6)
          scale BFGS with VecReciprocal(D)
        end
      end
      needH = False
    end

    if pc_type == BNK_PC_BFGS
      B_k = BFGS
    else
      B_k = VecMedian(1e-6, abs(diag(H_k)), 1e6)
      B_k = VecReciprocal(B_k)
    end
    w = x_k - VecMedian(x_k - 0.001*B_k*g_k)
    eps = min(eps, norm2(w))
    determine the active and inactive index sets such that
      L = {i : (x_k)_i <= l_i + eps && (g_k)_i > 0}
      U = {i : (x_k)_i >= u_i - eps && (g_k)_i < 0}
      F = {i : l_i = (x_k)_i = u_i}
      A = {L + U + F}
      IA = {i : i not in A}

    generate the reduced system Hr_k dr_k = -gr_k for variables in IA
    if p > 0
      Hr_k += p*I
    end
    if pc_type == BNK_PC_BFGS && scale_type == BNK_SCALE_PHESS
      D = VecMedian(1e-6, abs(diag(Hr_k)), 1e6)
      scale BFGS with VecReciprocal(D)
    end
    solve Hr_k dr_k = -gr_k
    set d_k to (l - x) for variables in L, (u - x) for variables in U, and 0 for variables in F

    if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
      dr_k = -BFGS*gr_k for variables in IA
      if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
        reset the BFGS preconditioner
        calculate scale delta and apply it to BFGS
        dr_k = -BFGS*gr_k for variables in IA
        if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
          dr_k = -gr_k for variables in IA
        end
      end
    end

    x_{k+1}, f_{k+1}, g_{k+1}, ls_failed = TaoBNKPerformLineSearch()
    if ls_failed
      f_{k+1} = f_k
      x_{k+1} = x_k
      g_{k+1} = g_k
      pg_{k+1} = pg_k
      terminate
    else
      pg_{k+1} = project(g_{k+1})
      count the accepted step type (Newton, BFGS, scaled grad or grad)
    end

    niter += 1
    check convergence at pg_{k+1}
 end
*/

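/*
  Illustrative sketch only (not used by the solver): the Bertsekas-style
  active/inactive classification from the comment above, written out for raw
  arrays. The helper name classify_active_set and the VarClass labels are
  hypothetical; the actual implementation is TaoBNKEstimateActiveSet() in
  bnk.c, which works with PETSc index sets.

    typedef enum {VAR_LOWER, VAR_UPPER, VAR_FIXED, VAR_INACTIVE} VarClass;

    static void classify_active_set(PetscInt n, const PetscReal x[], const PetscReal g[],
                                    const PetscReal xl[], const PetscReal xu[], PetscReal eps, VarClass c[])
    {
      for (PetscInt i = 0; i < n; ++i) {
        if (xl[i] == xu[i]) c[i] = VAR_FIXED;                         // F: variable fixed by equal bounds
        else if (x[i] <= xl[i] + eps && g[i] > 0.0) c[i] = VAR_LOWER; // L: near lower bound, gradient pushes outward
        else if (x[i] >= xu[i] - eps && g[i] < 0.0) c[i] = VAR_UPPER; // U: near upper bound, gradient pushes outward
        else c[i] = VAR_INACTIVE;                                     // IA: variable free to move in the Newton step
      }
    }
*/
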
PetscErrorCode TaoSolve_BNLS(Tao tao)
{
  TAO_BNK                     *bnk = (TAO_BNK *)tao->data;
  KSPConvergedReason           ksp_reason;
  TaoLineSearchConvergedReason ls_reason;
  PetscReal                    steplen = 1.0, resnorm;
  PetscBool                    cgTerminate, needH = PETSC_TRUE, stepAccepted, shift = PETSC_TRUE;
  PetscInt                     stepType;

  PetscFunctionBegin;
  /* Initialize the preconditioner, KSP solver and trust radius/line search */
  tao->reason = TAO_CONTINUE_ITERATING;
  PetscCall(TaoBNKInitialize(tao, bnk->init_type, &needH));
  if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(PETSC_SUCCESS);

  /* Have not converged; continue with Newton method */
  while (tao->reason == TAO_CONTINUE_ITERATING) {
    /* Call general purpose update function */
    if (tao->ops->update) {
      PetscUseTypeMethod(tao, update, tao->niter, tao->user_update);
      PetscCall(TaoComputeObjective(tao, tao->solution, &bnk->f));
    }

    if (needH && bnk->inactive_idx) {
      /* Take BNCG steps (if enabled) to trade off Hessian evaluations for more gradient evaluations */
      PetscCall(TaoBNKTakeCGSteps(tao, &cgTerminate));
      if (cgTerminate) {
        tao->reason = bnk->bncg->reason;
        PetscFunctionReturn(PETSC_SUCCESS);
      }
      /* Compute the Hessian and update the BFGS preconditioner at the new iterate */
      PetscCall((*bnk->computehessian)(tao));
      needH = PETSC_FALSE;
    }

    /* Use the common BNK kernel to compute the safeguarded Newton step (for inactive variables only) */
    PetscCall((*bnk->computestep)(tao, shift, &ksp_reason, &stepType));
    PetscCall(TaoBNKSafeguardStep(tao, ksp_reason, &stepType));

    /* Store current solution before it changes */
    bnk->fold = bnk->f;
    PetscCall(VecCopy(tao->solution, bnk->Xold));
    PetscCall(VecCopy(tao->gradient, bnk->Gold));
    PetscCall(VecCopy(bnk->unprojected_gradient, bnk->unprojected_gradient_old));

    /* Trigger the line search */
    PetscCall(TaoBNKPerformLineSearch(tao, &stepType, &steplen, &ls_reason));

    if (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER) {
      /* Failed to find an improving point */
      needH  = PETSC_FALSE;
      bnk->f = bnk->fold;
      PetscCall(VecCopy(bnk->Xold, tao->solution));
      PetscCall(VecCopy(bnk->Gold, tao->gradient));
      PetscCall(VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient));
      steplen     = 0.0;
      tao->reason = TAO_DIVERGED_LS_FAILURE;
    } else {
      /* new iterate so we need to recompute the Hessian */
      needH = PETSC_TRUE;
      /* compute the projected gradient */
      PetscCall(TaoBNKEstimateActiveSet(tao, bnk->as_type));
      PetscCall(VecCopy(bnk->unprojected_gradient, tao->gradient));
      if (bnk->active_idx) PetscCall(VecISSet(tao->gradient, bnk->active_idx, 0.0));
      PetscCall(TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm));
      /* update the trust radius based on the step length */
      PetscCall(TaoBNKUpdateTrustRadius(tao, 0.0, 0.0, BNK_UPDATE_STEP, stepType, &stepAccepted));
      /* count the accepted step type */
      PetscCall(TaoBNKAddStepCounts(tao, stepType));
      /* activate BNCG recycling for next iteration */
      PetscCall(TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE));
    }

    /*  Check for termination */
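    /*
       The termination residual is built from the componentwise Fischer-Burmeister function
       phi(a, b) = sqrt(a^2 + b^2) - a - b, applied by VecFischer() to the bound constraints
       and the unprojected gradient; it is zero exactly at points satisfying the first-order
       optimality conditions of the bound-constrained problem, so its norm is used as the
       convergence measure below.
    */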
    PetscCall(VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W));
    PetscCall(VecNorm(bnk->W, NORM_2, &resnorm));
    PetscCheck(!PetscIsInfOrNanReal(resnorm), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated infinity or NaN");
    ++tao->niter;
    PetscCall(TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its));
    PetscCall(TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, steplen));
    PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  TAOBNLS - Bounded Newton Line Search for nonlinear minimization with bound constraints.

  Options Database Keys:
+ -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
. -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
. -tao_bnk_update_type - trust radius update method ("step", "direction", "interpolation")
- -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")

  Level: beginner
M*/
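
/*
  A minimal usage sketch, not taken from this file: the Vec/Mat objects (x, xl, xu, H),
  the callbacks FormFunctionGradient/FormHessian, and the context `user` are illustrative
  placeholders that an application would provide.

    Tao tao;
    PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
    PetscCall(TaoSetType(tao, TAOBNLS));
    PetscCall(TaoSetSolution(tao, x));
    PetscCall(TaoSetVariableBounds(tao, xl, xu));
    PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, &user));
    PetscCall(TaoSetHessian(tao, H, H, FormHessian, &user));
    PetscCall(TaoSetFromOptions(tao));
    PetscCall(TaoSolve(tao));
    PetscCall(TaoDestroy(&tao));

  The solver can also be selected at run time with -tao_type bnls, combined with the
  -tao_bnk_* options listed above.
*/
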
PETSC_EXTERN PetscErrorCode TaoCreate_BNLS(Tao tao)
{
  TAO_BNK *bnk;

  PetscFunctionBegin;
  PetscCall(TaoCreate_BNK(tao));
  tao->ops->solve = TaoSolve_BNLS;

  bnk              = (TAO_BNK *)tao->data;
  bnk->init_type   = BNK_INIT_DIRECTION;
  bnk->update_type = BNK_UPDATE_STEP; /* trust region updates based on line search step length */
  PetscCall(TaoSolve_BNLS == tao->ops->solve ? PETSC_SUCCESS : PETSC_SUCCESS);
  PetscFunctionReturn(PETSC_SUCCESS);
}
197