xref: /petsc/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx (revision 887e4694e96d8751f9beb8f1400aa8dad5d21d72)
1# --------------------------------------------------------------------
2
class TAOType:
    """TAO solver type.

    See Also
    --------
    petsc.TaoType

    """
    # Each constant is the string name of a TAO solver algorithm, as
    # accepted by TAO.setType() / petsc.TaoSetType.
    LMVM     = S_(TAOLMVM)
    NLS      = S_(TAONLS)
    NTR      = S_(TAONTR)
    NTL      = S_(TAONTL)
    CG       = S_(TAOCG)
    TRON     = S_(TAOTRON)
    OWLQN    = S_(TAOOWLQN)
    BMRM     = S_(TAOBMRM)
    BLMVM    = S_(TAOBLMVM)
    BQNLS    = S_(TAOBQNLS)
    BNCG     = S_(TAOBNCG)
    BNLS     = S_(TAOBNLS)
    BNTR     = S_(TAOBNTR)
    BNTL     = S_(TAOBNTL)
    BQNKLS   = S_(TAOBQNKLS)
    BQNKTR   = S_(TAOBQNKTR)
    BQNKTL   = S_(TAOBQNKTL)
    BQPIP    = S_(TAOBQPIP)
    GPCG     = S_(TAOGPCG)
    NM       = S_(TAONM)
    POUNDERS = S_(TAOPOUNDERS)
    BRGN     = S_(TAOBRGN)
    LCL      = S_(TAOLCL)
    SSILS    = S_(TAOSSILS)
    SSFLS    = S_(TAOSSFLS)
    ASILS    = S_(TAOASILS)
    ASFLS    = S_(TAOASFLS)
    IPM      = S_(TAOIPM)
    PDIPM    = S_(TAOPDIPM)
    SHELL    = S_(TAOSHELL)
    ADMM     = S_(TAOADMM)
    ALMM     = S_(TAOALMM)
    PYTHON   = S_(TAOPYTHON)
44
class TAOConvergedReason:
    """TAO solver termination reason.

    Positive values indicate convergence, negative values divergence,
    and zero means the solver is still iterating.

    See Also
    --------
    petsc.TaoConvergedReason

    """
    # iterating
    CONTINUE_ITERATING    = TAO_CONTINUE_ITERATING    # iterating
    CONVERGED_ITERATING   = TAO_CONTINUE_ITERATING    # iterating
    ITERATING             = TAO_CONTINUE_ITERATING    # iterating
    # converged
    CONVERGED_GATOL       = TAO_CONVERGED_GATOL       # ||g(X)|| < gatol
    CONVERGED_GRTOL       = TAO_CONVERGED_GRTOL       # ||g(X)||/f(X)  < grtol
    CONVERGED_GTTOL       = TAO_CONVERGED_GTTOL       # ||g(X)||/||g(X0)|| < gttol
    CONVERGED_STEPTOL     = TAO_CONVERGED_STEPTOL     # small step size
    CONVERGED_MINF        = TAO_CONVERGED_MINF        # f(X) < F_min
    CONVERGED_USER        = TAO_CONVERGED_USER        # user defined
    # diverged
    DIVERGED_MAXITS       = TAO_DIVERGED_MAXITS       # maximum iterations reached
    DIVERGED_NAN          = TAO_DIVERGED_NAN          # NaN/Inf detected
    DIVERGED_MAXFCN       = TAO_DIVERGED_MAXFCN       # maximum function evaluations reached
    DIVERGED_LS_FAILURE   = TAO_DIVERGED_LS_FAILURE   # line search failure
    DIVERGED_TR_REDUCTION = TAO_DIVERGED_TR_REDUCTION # trust region reduction failed
    DIVERGED_USER         = TAO_DIVERGED_USER         # user defined
71
class TAOBNCGType:
    """TAO Bound Constrained Conjugate Gradient (BNCG) Update Type."""
    # gradient descent variants
    GD         = TAO_BNCG_GD
    PCGD       = TAO_BNCG_PCGD
    # classical nonlinear CG update formulas (named after their authors,
    # e.g. Hestenes-Stiefel, Fletcher-Reeves, Polak-Ribiere-Polyak, ...)
    HS         = TAO_BNCG_HS
    FR         = TAO_BNCG_FR
    PRP        = TAO_BNCG_PRP
    PRP_PLUS   = TAO_BNCG_PRP_PLUS
    DY         = TAO_BNCG_DY
    HZ         = TAO_BNCG_HZ
    DK         = TAO_BNCG_DK
    KD         = TAO_BNCG_KD
    # quasi-Newton based updates (SSML = self-scaled memoryless)
    SSML_BFGS  = TAO_BNCG_SSML_BFGS
    SSML_DFP   = TAO_BNCG_SSML_DFP
    SSML_BRDN  = TAO_BNCG_SSML_BRDN
87# --------------------------------------------------------------------
88
89cdef class TAO(Object):
90    """Optimization solver.
91
92    TAO is described in the `PETSc manual <petsc:manual/tao>`.
93
94    See Also
95    --------
96    petsc.Tao
97
98    """
99
    # Nested aliases so users can write TAO.Type, TAO.ConvergedReason, ...
    Type = TAOType
    ConvergedReason = TAOConvergedReason
    BNCGType = TAOBNCGType
    # FIXME backward compatibility
    Reason = TAOConvergedReason  # deprecated alias of ConvergedReason
105
    def __cinit__(self):
        # Expose the PETSc Tao handle through the generic PetscObject
        # pointer used by the Object base class machinery.
        self.obj = <PetscObject*> &self.tao
        self.tao = NULL
109
110    def view(self, Viewer viewer=None) -> None:
111        """View the solver.
112
113        Collective.
114
115        Parameters
116        ----------
117        viewer
118            A `Viewer` instance or `None` for the default viewer.
119
120        See Also
121        --------
122        petsc.TaoView
123
124        """
125        cdef PetscViewer vwr = NULL
126        if viewer is not None: vwr = viewer.vwr
127        CHKERR( TaoView(self.tao, vwr) )
128
    def destroy(self) -> Self:
        """Destroy the solver.

        Collective.

        See Also
        --------
        petsc.TaoDestroy

        """
        # TaoDestroy nullifies the handle; returning self enables chaining.
        CHKERR( TaoDestroy(&self.tao) )
        return self
141
142    def create(self, comm: Comm | None = None) -> Self:
143        """Create a TAO solver.
144
145        Collective.
146
147        Parameters
148        ----------
149        comm
150            MPI communicator, defaults to `Sys.getDefaultComm`.
151
152        See Also
153        --------
154        Sys.getDefaultComm, petsc.TaoCreate
155
156        """
157        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
158        cdef PetscTAO newtao = NULL
159        CHKERR( TaoCreate(ccomm, &newtao) )
160        CHKERR( PetscCLEAR(self.obj) ); self.tao = newtao
161        return self
162
163    def setType(self, tao_type: Type | str) -> None:
164        """Set the type of the solver.
165
166        Logically collective.
167
168        Parameters
169        ----------
170        tao_type
171            The type of the solver.
172
173        See Also
174        --------
175        getType, petsc.TaoSetType
176
177        """
178        cdef PetscTAOType ctype = NULL
179        tao_type = str2bytes(tao_type, &ctype)
180        CHKERR( TaoSetType(self.tao, ctype) )
181
182    def getType(self) -> str:
183        """Return the type of the solver.
184
185        Not collective.
186
187        See Also
188        --------
189        setType, petsc.TaoGetType
190
191        """
192        cdef PetscTAOType ctype = NULL
193        CHKERR( TaoGetType(self.tao, &ctype) )
194        return bytes2str(ctype)
195
196    def setOptionsPrefix(self, prefix: str) -> None:
197        """Set the prefix used for searching for options in the database.
198
199        Logically collective.
200
201        See Also
202        --------
203        petsc_options, petsc.TaoSetOptionsPrefix
204
205        """
206        cdef const char *cprefix = NULL
207        prefix = str2bytes(prefix, &cprefix)
208        CHKERR( TaoSetOptionsPrefix(self.tao, cprefix) )
209
210    def appendOptionsPrefix(self, prefix: str) -> None:
211        """Append to the prefix used for searching for options in the database.
212
213        Logically collective.
214
215        See Also
216        --------
217        petsc_options, setOptionsPrefix, petsc.TaoAppendOptionsPrefix
218
219        """
220        cdef const char *cprefix = NULL
221        prefix = str2bytes(prefix, &cprefix)
222        CHKERR( TaoAppendOptionsPrefix(self.tao, cprefix) )
223
224    def getOptionsPrefix(self) -> str:
225        """Return the prefix used for searching for options in the database.
226
227        Not collective.
228
229        See Also
230        --------
231        petsc_options, setOptionsPrefix, petsc.TaoGetOptionsPrefix
232
233        """
234        cdef const char *prefix = NULL
235        CHKERR( TaoGetOptionsPrefix(self.tao, &prefix) )
236        return bytes2str(prefix)
237
    def setFromOptions(self) -> None:
        """Configure the solver from the options database.

        Collective.

        See Also
        --------
        petsc_options, petsc.TaoSetFromOptions

        """
        CHKERR( TaoSetFromOptions(self.tao) )
249
    def setUp(self) -> None:
        """Set up the internal data structures for using the solver.

        Collective.

        See Also
        --------
        petsc.TaoSetUp

        """
        CHKERR( TaoSetUp(self.tao) )
261
262    #
263
264    def setInitialTrustRegionRadius(self, radius: float) -> None:
265        """Set the initial trust region radius.
266
267        Collective.
268
269        See Also
270        --------
271        petsc.TaoSetInitialTrustRegionRadius
272
273        """
274        cdef PetscReal cradius = asReal(radius)
275        CHKERR( TaoSetInitialTrustRegionRadius(self.tao, cradius) )
276
277    # --------------
278
    def setAppCtx(self, appctx: Any) -> None:
        """Set the application context."""
        # stored on the Python side only; retrieved with getAppCtx()
        self.set_attr("__appctx__", appctx)
282
    def getAppCtx(self) -> Any:
        """Return the application context."""
        return self.get_attr("__appctx__")
286
    def setSolution(self, Vec x) -> None:
        """Set the vector used to store the solution.

        Collective.

        See Also
        --------
        getSolution, petsc.TaoSetSolution

        """
        CHKERR( TaoSetSolution(self.tao, x.vec) )
298
299    def setObjective(self, objective: TAOObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
300        """Set the objective function evaluation callback.
301
302        Logically collective.
303
304        Parameters
305        ----------
306        objective
307            The objective function callback.
308        args
309            Positional arguments for the callback.
310        kargs
311            Keyword arguments for the callback.
312
313        See Also
314        --------
315        setGradient, setObjectiveGradient, petsc.TaoSetObjective
316
317        """
318        if args is None: args = ()
319        if kargs is None: kargs = {}
320        context = (objective, args, kargs)
321        self.set_attr("__objective__", context)
322        CHKERR( TaoSetObjective(self.tao, TAO_Objective, <void*>context) )
323
324    def setResidual(self, residual: TAOResidualFunction, Vec R=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
325        """Set the residual evaluation callback for least-squares applications.
326
327        Logically collective.
328
329        Parameters
330        ----------
331        residual
332            The residual callback.
333        R
334            The vector to store the residual.
335        args
336            Positional arguments for the callback.
337        kargs
338            Keyword arguments for the callback.
339
340        See Also
341        --------
342        setJacobianResidual, petsc.TaoSetResidualRoutine
343
344        """
345        cdef PetscVec Rvec = NULL
346        if R is not None: Rvec = R.vec
347        if args is None: args = ()
348        if kargs is None: kargs = {}
349        context = (residual, args, kargs)
350        self.set_attr("__residual__", context)
351        CHKERR( TaoSetResidualRoutine(self.tao, Rvec, TAO_Residual, <void*>context) )
352
353    def setJacobianResidual(self, jacobian: TAOJacobianResidualFunction, Mat J=None, Mat P=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
354        """Set the callback to compute the least-squares residual Jacobian.
355
356        Logically collective.
357
358        Parameters
359        ----------
360        jacobian
361            The Jacobian callback.
362        J
363            The matrix to store the Jacobian.
364        P
365            The matrix to construct the preconditioner.
366        args
367            Positional arguments for the callback.
368        kargs
369            Keyword arguments for the callback.
370
371        See Also
372        --------
373        setResidual, petsc.TaoSetJacobianResidualRoutine
374
375        """
376        cdef PetscMat Jmat = NULL
377        if J is not None: Jmat = J.mat
378        cdef PetscMat Pmat = Jmat
379        if P is not None: Pmat = P.mat
380        if args is None: args = ()
381        if kargs is None: kargs = {}
382        context = (jacobian, args, kargs)
383        self.set_attr("__jacobian_residual__", context)
384        CHKERR( TaoSetJacobianResidualRoutine(self.tao, Jmat, Pmat, TAO_JacobianResidual, <void*>context) )
385
386    def setGradient(self, gradient: TAOGradientFunction, Vec g=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
387        """Set the gradient evaluation callback.
388
389        Logically collective.
390
391        Parameters
392        ----------
393        gradient
394            The gradient callback.
395        g
396            The vector to store the gradient.
397        args
398            Positional arguments for the callback.
399        kargs
400            Keyword arguments for the callback.
401
402        See Also
403        --------
404        setObjective, setObjectiveGradient, setHessian, petsc.TaoSetGradient
405
406        """
407        cdef PetscVec gvec = NULL
408        if g is not None: gvec = g.vec
409        if args is None: args = ()
410        if kargs is None: kargs = {}
411        context = (gradient, args, kargs)
412        self.set_attr("__gradient__", context)
413        CHKERR( TaoSetGradient(self.tao, gvec, TAO_Gradient, <void*>context) )
414
415    def getGradient(self) -> tuple[Vec, TAOGradientFunction]:
416        """Return the vector used to store the gradient and the evaluation callback.
417
418        Not collective.
419
420        See Also
421        --------
422        setGradient, setHessian, petsc.TaoGetGradient
423
424        """
425        cdef Vec vec = Vec()
426        CHKERR( TaoGetGradient(self.tao, &vec.vec, NULL, NULL) )
427        CHKERR( PetscINCREF(vec.obj) )
428        cdef object gradient = self.get_attr("__gradient__")
429        return (vec, gradient)
430
431    def setObjectiveGradient(self, objgrad: TAOObjectiveGradientFunction, Vec g=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
432        """Set the objective function and gradient evaluation callback.
433
434        Logically collective.
435
436        Parameters
437        ----------
438        objgrad
439            The objective function and gradient callback.
440        g
441            The vector to store the gradient.
442        args
443            Positional arguments for the callback.
444        kargs
445            Keyword arguments for the callback.
446
447        See Also
448        --------
449        setObjective, setGradient, setHessian, getObjectiveAndGradient
450        petsc.TaoSetObjectiveAndGradient
451
452        """
453        cdef PetscVec gvec = NULL
454        if g is not None: gvec = g.vec
455        if args is None: args = ()
456        if kargs is None: kargs = {}
457        context = (objgrad, args, kargs)
458        self.set_attr("__objgrad__", context)
459        CHKERR( TaoSetObjectiveAndGradient(self.tao, gvec, TAO_ObjGrad, <void*>context) )
460
461    def getObjectiveAndGradient(self) -> tuple[Vec, TAOObjectiveGradientFunction]:
462        """Return the vector used to store the gradient and the evaluation callback.
463
464        Not collective.
465
466        See Also
467        --------
468        setObjectiveGradient, petsc.TaoGetObjectiveAndGradient
469
470        """
471        cdef Vec vec = Vec()
472        CHKERR( TaoGetObjectiveAndGradient(self.tao, &vec.vec, NULL, NULL) )
473        CHKERR( PetscINCREF(vec.obj) )
474        cdef object objgrad = self.get_attr("__objgrad__")
475        return (vec, objgrad)
476
    def setVariableBounds(self, varbounds: tuple[Vec, Vec] | TAOVariableBoundsFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the upper and lower bounds for the optimization problem.

        Logically collective.

        Parameters
        ----------
        varbounds
            Either a tuple of `Vec` or a `TAOVariableBoundsFunction` callback.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoSetVariableBounds, petsc.TaoSetVariableBoundsRoutine

        """
        cdef Vec xl = None, xu = None
        # Case 1: a (lower, upper) pair of vectors given as list/tuple.
        if (isinstance(varbounds, list) or isinstance(varbounds, tuple)):
            ol, ou = varbounds
            xl = <Vec?> ol; xu = <Vec?> ou
            CHKERR( TaoSetVariableBounds(self.tao, xl.vec, xu.vec) )
            return
        # Case 2 (legacy): lower bound as first argument and upper bound
        # smuggled through `args` — kept only for backward compatibility.
        if isinstance(varbounds, Vec): #FIXME
            ol = varbounds; ou = args
            xl = <Vec?> ol; xu = <Vec?> ou
            CHKERR( TaoSetVariableBounds(self.tao, xl.vec, xu.vec) )
            return
        # Case 3: a Python callback computing the bounds on demand.
        if args is None: args = ()
        if kargs is None: kargs = {}
        context = (varbounds, args, kargs)
        self.set_attr("__varbounds__", context)
        CHKERR( TaoSetVariableBoundsRoutine(self.tao, TAO_VarBounds, <void*>context) )
512
513    def setConstraints(self, constraints: TAOConstraintsFunction, Vec C=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
514        """Set the callback to compute constraints.
515
516        Logically collective.
517
518        Parameters
519        ----------
520        constraints
521            The callback.
522        C
523            The vector to hold the constraints.
524        args
525            Positional arguments for the callback.
526        kargs
527            Keyword arguments for the callback.
528
529        See Also
530        --------
531        petsc.TaoSetConstraintsRoutine
532
533        """
534        cdef PetscVec Cvec = NULL
535        if C is not None: Cvec = C.vec
536        if args is None: args = ()
537        if kargs is None: kargs = {}
538        context = (constraints, args, kargs)
539        self.set_attr("__constraints__", context)
540        CHKERR( TaoSetConstraintsRoutine(self.tao, Cvec, TAO_Constraints, <void*>context) )
541
542    def setHessian(self, hessian: TAOHessianFunction, Mat H=None, Mat P=None,
543                   args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
544        """Set the callback to compute the Hessian matrix.
545
546        Logically collective.
547
548        Parameters
549        ----------
550        hessian
551            The Hessian callback.
552        H
553            The matrix to store the Hessian.
554        P
555            The matrix to construct the preconditioner.
556        args
557            Positional arguments for the callback.
558        kargs
559            Keyword arguments for the callback.
560
561        See Also
562        --------
563        getHessian, setObjective, setObjectiveGradient, setGradient
564        petsc.TaoSetHessian
565
566        """
567        cdef PetscMat Hmat = NULL
568        if H is not None: Hmat = H.mat
569        cdef PetscMat Pmat = Hmat
570        if P is not None: Pmat = P.mat
571        if args is None: args = ()
572        if kargs is None: kargs = {}
573        context = (hessian, args, kargs)
574        self.set_attr("__hessian__", context)
575        CHKERR( TaoSetHessian(self.tao, Hmat, Pmat, TAO_Hessian, <void*>context) )
576
577    def getHessian(self) -> tuple[Mat, Mat, TAOHessianFunction]:
578        """Return the matrices used to store the Hessian and the evaluation callback.
579
580        Not collective.
581
582        See Also
583        --------
584        setHessian, petsc.TaoGetHessian
585
586        """
587        cdef Mat J = Mat()
588        cdef Mat P = Mat()
589        CHKERR( TaoGetHessian(self.tao, &J.mat, &P.mat, NULL, NULL) )
590        CHKERR( PetscINCREF(J.obj) )
591        CHKERR( PetscINCREF(P.obj) )
592        cdef object hessian = self.get_attr("__hessian__")
593        return (J, P, hessian)
594
595    def setJacobian(self, jacobian: TAOJacobianFunction, Mat J=None, Mat P=None,
596                    args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
597        """Set the callback to compute the Jacobian.
598
599        Logically collective.
600
601        Parameters
602        ----------
603        jacobian
604            The Jacobian callback.
605        J
606            The matrix to store the Jacobian.
607        P
608            The matrix to construct the preconditioner.
609        args
610            Positional arguments for the callback.
611        kargs
612            Keyword arguments for the callback.
613
614        See Also
615        --------
616        petsc.TaoSetJacobianRoutine
617
618        """
619        cdef PetscMat Jmat = NULL
620        if J is not None: Jmat = J.mat
621        cdef PetscMat Pmat = Jmat
622        if P is not None: Pmat = P.mat
623        if args is None: args = ()
624        if kargs is None: kargs = {}
625        context = (jacobian, args, kargs)
626        self.set_attr("__jacobian__", context)
627        CHKERR( TaoSetJacobianRoutine(self.tao, Jmat, Pmat, TAO_Jacobian, <void*>context) )
628
629    def setStateDesignIS(self, IS state=None, IS design=None) -> None:
630        """Set the index sets indicating state and design variables.
631
632        Collective.
633
634        See Also
635        --------
636        petsc.TaoSetStateDesignIS
637
638        """
639        cdef PetscIS s_is = NULL, d_is = NULL
640        if state  is not None: s_is = state.iset
641        if design is not None: d_is = design.iset
642        CHKERR( TaoSetStateDesignIS(self.tao, s_is, d_is) )
643
644    def setJacobianState(self, jacobian_state, Mat J=None, Mat P=None, Mat I=None,
645                         args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
646        """Set Jacobian state callback.
647
648        Logically collective.
649
650        See Also
651        --------
652        petsc.TaoSetJacobianStateRoutine
653
654        """
655        cdef PetscMat Jmat = NULL
656        if J is not None: Jmat = J.mat
657        cdef PetscMat Pmat = Jmat
658        if P is not None: Pmat = P.mat
659        cdef PetscMat Imat = NULL
660        if I is not None: Imat = I.mat
661        if args is None: args = ()
662        if kargs is None: kargs = {}
663        context = (jacobian_state, args, kargs)
664        self.set_attr("__jacobian_state__", context)
665        CHKERR( TaoSetJacobianStateRoutine(self.tao, Jmat, Pmat, Imat,
666                                           TAO_JacobianState, <void*>context) )
667
668    def setJacobianDesign(self, jacobian_design, Mat J=None,
669                          args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
670        """Set Jacobian design callback.
671
672        Logically collective.
673
674        See Also
675        --------
676        petsc.TaoSetJacobianDesignRoutine
677
678        """
679        cdef PetscMat Jmat = NULL
680        if J is not None: Jmat = J.mat
681        if args is None: args = ()
682        if kargs is None: kargs = {}
683        context = (jacobian_design, args, kargs)
684        self.set_attr("__jacobian_design__", context)
685        CHKERR( TaoSetJacobianDesignRoutine(self.tao, Jmat,
686                                            TAO_JacobianDesign, <void*>context) )
687
688    def setEqualityConstraints(self, equality_constraints, Vec c,
689                               args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
690        """Set equality constraints callback.
691
692        Logically collective.
693
694        See Also
695        --------
696        petsc.TaoSetEqualityConstraintsRoutine
697
698        """
699        if args is None: args = ()
700        if kargs is None: kargs = {}
701        context = (equality_constraints, args, kargs)
702        self.set_attr("__equality_constraints__", context)
703        CHKERR( TaoSetEqualityConstraintsRoutine(self.tao, c.vec,
704                                                 TAO_EqualityConstraints, <void*>context) )
705
706    def setJacobianEquality(self, jacobian_equality, Mat J=None, Mat P=None,
707                            args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
708        """Set Jacobian equality constraints callback.
709
710        Logically collective.
711
712        See Also
713        --------
714        petsc.TaoSetJacobianEqualityRoutine
715
716        """
717        cdef PetscMat Jmat = NULL
718        if J is not None: Jmat = J.mat
719        cdef PetscMat Pmat = Jmat
720        if P is not None: Pmat = P.mat
721        if args is None: args = ()
722        if kargs is None: kargs = {}
723        context = (jacobian_equality, args, kargs)
724        self.set_attr("__jacobian_equality__", context)
725        CHKERR( TaoSetJacobianEqualityRoutine(self.tao, Jmat, Pmat,
726                                              TAO_JacobianEquality, <void*>context) )
727
728    def setUpdate(self, update: TAOUpdateFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
729        """Set the callback to compute update at each optimization step.
730
731        Logically collective.
732
733        Parameters
734        ----------
735        update
736            The update callback or `None` to reset it.
737        args
738            Positional arguments for the callback.
739        kargs
740            Keyword arguments for the callback.
741
742        See Also
743        --------
744        getUpdate, petsc.TaoSetUpdate
745
746        """
747        if update is not None:
748            if args  is None: args  = ()
749            if kargs is None: kargs = {}
750            context = (update, args, kargs)
751            self.set_attr('__update__', context)
752            CHKERR( TaoSetUpdate(self.tao, TAO_Update, <void*>context) )
753        else:
754            self.set_attr('__update__', None)
755            CHKERR( TaoSetUpdate(self.tao, NULL, NULL) )
756
    def getUpdate(self) -> tuple[TAOUpdateFunction, tuple[Any,...], dict[str, Any]]:
        """Return the callback to compute the update.

        Not collective.

        See Also
        --------
        setUpdate

        """
        # returns the (callback, args, kargs) tuple stored by setUpdate,
        # or None if no update callback has been set
        return self.get_attr('__update__')
768
769    # --------------
770
771    def computeObjective(self, Vec x) -> float:
772        """Compute the value of the objective function.
773
774        Collective.
775
776        Parameters
777        ----------
778        x
779            The parameter vector.
780
781        See Also
782        --------
783        setObjective, petsc.TaoComputeObjective
784
785        """
786        cdef PetscReal f = 0
787        CHKERR( TaoComputeObjective(self.tao, x.vec, &f) )
788        return toReal(f)
789
    def computeResidual(self, Vec x, Vec f) -> None:
        """Compute the residual.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        f
            The output vector.

        See Also
        --------
        setResidual, petsc.TaoComputeResidual

        """
        CHKERR( TaoComputeResidual(self.tao, x.vec, f.vec) )
808
    def computeGradient(self, Vec x, Vec g) -> None:
        """Compute the gradient of the objective function.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        g
            The output gradient vector.

        See Also
        --------
        setGradient, petsc.TaoComputeGradient

        """
        CHKERR( TaoComputeGradient(self.tao, x.vec, g.vec) )
827
828    def computeObjectiveGradient(self, Vec x, Vec g) -> float:
829        """Compute the gradient of the objective function and its value.
830
831        Collective.
832
833        Parameters
834        ----------
835        x
836            The parameter vector.
837        g
838            The output gradient vector.
839
840        See Also
841        --------
842        setObjectiveGradient, setGradient, setObjective
843        petsc.TaoComputeObjectiveAndGradient
844
845        """
846        cdef PetscReal f = 0
847        CHKERR( TaoComputeObjectiveAndGradient(self.tao, x.vec, &f, g.vec) )
848        return toReal(f)
849
    def computeDualVariables(self, Vec xl, Vec xu) -> None:
        """Compute the dual vectors corresponding to variables' bounds.

        Collective.

        See Also
        --------
        petsc.TaoComputeDualVariables

        """
        CHKERR( TaoComputeDualVariables(self.tao, xl.vec, xu.vec) )
861
    def computeVariableBounds(self, Vec xl, Vec xu) -> None:
        """Compute the vectors corresponding to variables' bounds.

        Collective.

        See Also
        --------
        setVariableBounds, petsc.TaoComputeVariableBounds

        """
        # let TAO (re)compute its internal bound vectors first
        CHKERR( TaoComputeVariableBounds(self.tao) )
        cdef PetscVec Lvec = NULL, Uvec = NULL
        CHKERR( TaoGetVariableBounds(self.tao, &Lvec, &Uvec) )
        # copy the internal bounds into the user-provided vectors; if a
        # bound is not set, fill with -inf (lower) or +inf (upper) to
        # represent an unbounded direction
        if xl.vec != NULL:
            if Lvec != NULL:
                CHKERR( VecCopy(Lvec, xl.vec) )
            else:
                CHKERR( VecSet(xl.vec, <PetscScalar>PETSC_NINFINITY) )
        if xu.vec != NULL:
            if Uvec != NULL:
                CHKERR( VecCopy(Uvec, xu.vec) )
            else:
                CHKERR( VecSet(xu.vec, <PetscScalar>PETSC_INFINITY) )
885
886    def computeConstraints(self, Vec x, Vec c) -> None:
887        """Compute the vector corresponding to the constraints.
888
889        Collective.
890
891        Parameters
892        ----------
893        x
894            The parameter vector.
895        c
896            The output constraints vector.
897
898        See Also
899        --------
900        setVariableBounds, petsc.TaoComputeVariableBounds
901
902        """
903        CHKERR( TaoComputeConstraints(self.tao, x.vec, c.vec) )
904
905    def computeHessian(self, Vec x, Mat H, Mat P=None) -> None:
906        """Compute the Hessian of the objective function.
907
908        Collective.
909
910        Parameters
911        ----------
912        x
913            The parameter vector.
914        H
915            The output Hessian matrix.
916        P
917            The output Hessian matrix used to construct the preconditioner.
918
919        See Also
920        --------
921        setHessian, petsc.TaoComputeHessian
922
923        """
924        cdef PetscMat hmat = H.mat, pmat = H.mat
925        if P is not None: pmat = P.mat
926        CHKERR( TaoComputeHessian(self.tao, x.vec, hmat, pmat) )
927
928    def computeJacobian(self, Vec x, Mat J, Mat P=None) -> None:
929        """Compute the Jacobian.
930
931        Collective.
932
933        Parameters
934        ----------
935        x
936            The parameter vector.
937        J
938            The output Jacobian matrix.
939        P
940            The output Jacobian matrix used to construct the preconditioner.
941
942        See Also
943        --------
944        setJacobian, petsc.TaoComputeJacobian
945
946        """
947        cdef PetscMat jmat = J.mat, pmat = J.mat
948        if P is not None: pmat = P.mat
949        CHKERR( TaoComputeJacobian(self.tao, x.vec, jmat, pmat) )
950
951    # --------------
952
    def setTolerances(self, gatol: float | None = None, grtol: float | None = None, gttol: float | None = None) -> None:
        """Set the tolerance parameters used in the solver convergence tests.

        Collective.

        Parameters
        ----------
        gatol
            The absolute norm of the gradient. Defaults to `DEFAULT`.
        grtol
            The relative norm of the gradient with respect
            to the initial norm of the objective. Defaults to `DEFAULT`.
        gttol
            The relative norm of the gradient with respect
            to the initial norm of the gradient. Defaults to `DEFAULT`.

        See Also
        --------
        getTolerances, petsc.TaoSetTolerances

        """
        # unspecified tolerances are left at PETSC_DEFAULT so PETSc
        # keeps (or chooses) its own default values
        cdef PetscReal _gatol=PETSC_DEFAULT, _grtol=PETSC_DEFAULT, _gttol=PETSC_DEFAULT
        if gatol is not None: _gatol = asReal(gatol)
        if grtol is not None: _grtol = asReal(grtol)
        if gttol is not None: _gttol = asReal(gttol)
        CHKERR( TaoSetTolerances(self.tao, _gatol, _grtol, _gttol) )
979
    def getTolerances(self) -> tuple[float, float, float]:
        """Return the tolerance parameters used in the solver convergence tests.

        Not collective.

        Returns
        -------
        gatol : float
            The absolute norm of the gradient.
        grtol : float
            The relative norm of the gradient with respect to
            the initial norm of the objective.
        gttol : float
            The relative norm of the gradient with respect to
            the initial norm of the gradient.

        See Also
        --------
        setTolerances, petsc.TaoGetTolerances

        """
        # Initial values are placeholders; overwritten by the library call.
        cdef PetscReal _gatol=PETSC_DEFAULT, _grtol=PETSC_DEFAULT, _gttol=PETSC_DEFAULT
        CHKERR( TaoGetTolerances(self.tao, &_gatol, &_grtol, &_gttol) )
        return (toReal(_gatol), toReal(_grtol), toReal(_gttol))
1004
    def setMaximumIterations(self, mit: int) -> None:
        """Set the maximum number of solver iterations.

        Collective.

        Parameters
        ----------
        mit
            The maximum number of iterations.

        See Also
        --------
        setTolerances, petsc.TaoSetMaximumIterations

        """
        cdef PetscInt _mit = asInt(mit)
        CHKERR( TaoSetMaximumIterations(self.tao, _mit) )
1017
1018    def getMaximumIterations(self) -> int:
1019        """Return the maximum number of solver iterations.
1020
1021        Not collective.
1022
1023        See Also
1024        --------
1025        setMaximumIterations, petsc.TaoGetMaximumIterations
1026
1027        """
1028        cdef PetscInt _mit = PETSC_DEFAULT
1029        CHKERR( TaoGetMaximumIterations(self.tao, &_mit) )
1030        return toInt(_mit)
1031
    def setMaximumFunctionEvaluations(self, mit: int) -> None:
        """Set the maximum number of objective evaluations within the solver.

        Collective.

        Parameters
        ----------
        mit
            The maximum number of objective evaluations.

        See Also
        --------
        setMaximumIterations, petsc.TaoSetMaximumFunctionEvaluations

        """
        cdef PetscInt _mit = asInt(mit)
        CHKERR( TaoSetMaximumFunctionEvaluations(self.tao, _mit) )
1044
    def getMaximumFunctionEvaluations(self) -> int:
        """Return the maximum number of objective evaluations within the solver.

        Not collective.

        See Also
        --------
        setMaximumFunctionEvaluations, petsc.TaoGetMaximumFunctionEvaluations

        """
        # Placeholder initial value; overwritten by the library call.
        cdef PetscInt _mit = PETSC_DEFAULT
        CHKERR( TaoGetMaximumFunctionEvaluations(self.tao, &_mit) )
        return toInt(_mit)
1058
    def setConstraintTolerances(self, catol: float | None = None, crtol: float | None = None) -> None:
        """Set the constraints tolerance parameters used in the solver convergence tests.

        Collective.

        Parameters
        ----------
        catol
            The absolute norm of the constraints. Defaults to `DEFAULT`.
        crtol
            The relative norm of the constraints. Defaults to `DEFAULT`.

        See Also
        --------
        getConstraintTolerances, petsc.TaoSetConstraintTolerances

        """
        # None arguments are passed through as PETSC_DEFAULT.
        cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT
        if catol is not None: _catol = asReal(catol)
        if crtol is not None: _crtol = asReal(crtol)
        CHKERR( TaoSetConstraintTolerances(self.tao, _catol, _crtol) )
1080
    def getConstraintTolerances(self) -> tuple[float, float]:
        """Return the constraints tolerance parameters used in the convergence tests.

        Not collective.

        Returns
        -------
        catol : float
            The absolute norm of the constraints.
        crtol : float
            The relative norm of the constraints.

        See Also
        --------
        setConstraintTolerances, petsc.TaoGetConstraintTolerances

        """
        # Placeholder initial values; overwritten by the library call.
        cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT
        CHKERR( TaoGetConstraintTolerances(self.tao, &_catol, &_crtol) )
        return (toReal(_catol), toReal(_crtol))
1101
    def setConvergenceTest(self, converged: TAOConvergedFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback used to test for solver convergence.

        Logically collective.

        Parameters
        ----------
        converged
            The callback. If `None`, reset to the default convergence test.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        getConvergenceTest, petsc.TaoSetConvergenceTest

        """
        if converged is None:
            # Restore the library's default test and drop any stored callback.
            CHKERR( TaoSetConvergenceTest(self.tao, TaoDefaultConvergenceTest, NULL) )
            self.set_attr('__converged__', None)
        else:
            if args is None: args = ()
            if kargs is None: kargs = {}
            # The C trampoline TAO_Converged looks up '__converged__' on this
            # object, so the tuple must be stored before the callback fires.
            self.set_attr('__converged__', (converged, args, kargs))
            CHKERR( TaoSetConvergenceTest(self.tao, TAO_Converged, NULL) )
1129
    def getConvergenceTest(self) -> tuple[TAOConvergedFunction, tuple[Any, ...], dict[str, Any]]:
        """Return the callback used to test for solver convergence.

        Not collective.

        See Also
        --------
        setConvergenceTest

        """
        # Returns the (callback, args, kargs) tuple stored by
        # setConvergenceTest, or None if no custom test is set.
        return self.get_attr('__converged__')
1141
    def setConvergedReason(self, reason: ConvergedReason) -> None:
        """Set the termination flag.

        Collective.

        Parameters
        ----------
        reason
            The converged reason to store in the solver.

        See Also
        --------
        getConvergedReason, petsc.TaoSetConvergedReason

        """
        cdef PetscTAOConvergedReason creason = reason
        CHKERR( TaoSetConvergedReason(self.tao, creason) )
1154
    def getConvergedReason(self) -> ConvergedReason:
        """Return the termination flag.

        Not collective.

        See Also
        --------
        setConvergedReason, petsc.TaoGetConvergedReason

        """
        # NOTE(review): an identical `getConvergedReason` is redefined later
        # in this class; that later definition overrides this one.
        cdef PetscTAOConvergedReason creason = TAO_CONTINUE_ITERATING
        CHKERR( TaoGetConvergedReason(self.tao, &creason) )
        return creason
1168
    def setMonitor(self, monitor: TAOMonitorFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback used to monitor solver convergence.

        Logically collective.

        Parameters
        ----------
        monitor
            The callback. If `None`, this call is a no-op.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        getMonitor, petsc.TaoMonitorSet

        """
        if monitor is None: return
        cdef object monitorlist = self.get_attr('__monitor__')
        if args  is None: args  = ()
        if kargs is None: kargs = {}
        if monitorlist is None:
            # First monitor: register the C-level trampoline once; it will
            # invoke every entry stored in the '__monitor__' list.
            CHKERR( TaoMonitorSet(self.tao, TAO_Monitor, NULL, NULL) )
            self.set_attr('__monitor__',  [(monitor, args, kargs)])
        else:
            # Subsequent monitors only append; the trampoline is already set.
            monitorlist.append((monitor, args, kargs))
1197
    def getMonitor(self) -> list[tuple[TAOMonitorFunction, tuple[Any, ...], dict[str, Any]]]:
        """Return the callbacks used to monitor solver convergence.

        Not collective.

        See Also
        --------
        setMonitor

        """
        # Returns the list built by setMonitor, or None if no monitor is set.
        return self.get_attr('__monitor__')
1209
    def cancelMonitor(self) -> None:
        """Cancel all the monitors of the solver.

        Logically collective.

        See Also
        --------
        setMonitor, petsc.TaoMonitorCancel

        """
        CHKERR( TaoMonitorCancel(self.tao) )
        # Also drop the Python-side callback list kept by setMonitor.
        self.set_attr('__monitor__',  None)
1222
    # Tao overwrites these statistics. Copy user defined only if present
    def monitor(self, its: int | None = None, f: float | None = None, res: float | None = None, cnorm: float | None = None, step: float | None = None) -> None:
        """Monitor the solver.

        Collective.

        This function should be called without arguments,
        unless users want to modify the values internally stored by the solver.

        Parameters
        ----------
        its
            Current number of iterations
            or `None` to use the value stored internally by the solver.
        f
            Current value of the objective function
            or `None` to use the value stored internally by the solver.
        res
            Current value of the residual norm
            or `None` to use the value stored internally by the solver.
        cnorm
            Current value of the constrains norm
            or `None` to use the value stored internally by the solver.
        step
            Current value of the step
            or `None` to use the value stored internally by the solver.

        See Also
        --------
        setMonitor, petsc.TaoMonitor

        """
        cdef PetscInt cits = 0
        cdef PetscReal cf = 0.0
        cdef PetscReal cres = 0.0
        cdef PetscReal ccnorm = 0.0
        cdef PetscReal cstep = 0.0
        # Start from the values stored internally, then overwrite any that
        # the caller supplied explicitly.
        CHKERR( TaoGetSolutionStatus(self.tao, &cits, &cf, &cres, &ccnorm, &cstep, NULL) )
        if its is not None:
            cits = asInt(its)
        if f is not None:
            cf = asReal(f)
        if res is not None:
            cres = asReal(res)
        if cnorm is not None:
            ccnorm = asReal(cnorm)
        if step is not None:
            cstep = asReal(step)
        CHKERR( TaoMonitor(self.tao, cits, cf, cres, ccnorm, cstep) )
1272
1273    #
1274
    def solve(self, Vec x=None) -> None:
        """Solve the optimization problem.

        Collective.

        Parameters
        ----------
        x
            The starting vector or `None` to use the vector stored internally.

        See Also
        --------
        setSolution, getSolution, petsc.TaoSolve

        """
        # An explicit starting vector becomes the solver's solution vector.
        if x is not None:
            CHKERR( TaoSetSolution(self.tao, x.vec) )
        CHKERR( TaoSolve(self.tao) )
1293
    def getSolution(self) -> Vec:
        """Return the vector holding the solution.

        Not collective.

        See Also
        --------
        setSolution, petsc.TaoGetSolution

        """
        cdef Vec vec = Vec()
        CHKERR( TaoGetSolution(self.tao, &vec.vec) )
        # The C call returns a borrowed reference; take ownership.
        CHKERR( PetscINCREF(vec.obj) )
        return vec
1308
    def setGradientNorm(self, Mat mat) -> None:
        """Set the matrix used to compute inner products.

        Collective.

        Parameters
        ----------
        mat
            The matrix defining the norm.

        See Also
        --------
        getGradientNorm, petsc.TaoSetGradientNorm

        """
        CHKERR( TaoSetGradientNorm(self.tao, mat.mat) )
1320
    def getGradientNorm(self) -> Mat:
        """Return the matrix used to compute inner products.

        Not collective.

        See Also
        --------
        setGradientNorm, petsc.TaoGetGradientNorm

        """
        cdef Mat mat = Mat()
        CHKERR( TaoGetGradientNorm(self.tao, &mat.mat) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(mat.obj) )
        return mat
1335
    def setLMVMH0(self, Mat mat) -> None:
        """Set the initial Hessian for the quasi-Newton approximation.

        Collective.

        Parameters
        ----------
        mat
            The initial Hessian matrix.

        See Also
        --------
        getLMVMH0, petsc.TaoLMVMSetH0

        """
        CHKERR( TaoLMVMSetH0(self.tao, mat.mat) )
1347
    def getLMVMH0(self) -> Mat:
        """Return the initial Hessian for the quasi-Newton approximation.

        Not collective.

        See Also
        --------
        setLMVMH0, petsc.TaoLMVMGetH0

        """
        cdef Mat mat = Mat()
        CHKERR( TaoLMVMGetH0(self.tao, &mat.mat) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(mat.obj) )
        return mat
1362
    def getLMVMH0KSP(self) -> KSP:
        """Return the `KSP` for the inverse of the initial Hessian approximation.

        Not collective.

        See Also
        --------
        setLMVMH0, petsc.TaoLMVMGetH0KSP

        """
        cdef KSP ksp = KSP()
        CHKERR( TaoLMVMGetH0KSP(self.tao, &ksp.ksp) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(ksp.obj) )
        return ksp
1377
    def getVariableBounds(self) -> tuple[Vec, Vec]:
        """Return the upper and lower bounds vectors.

        Not collective.

        Returns
        -------
        xl : Vec
            The lower bounds vector.
        xu : Vec
            The upper bounds vector.

        See Also
        --------
        setVariableBounds, petsc.TaoGetVariableBounds

        """
        cdef Vec xl = Vec(), xu = Vec()
        CHKERR( TaoGetVariableBounds(self.tao, &xl.vec, &xu.vec) )
        # Both are borrowed references; take ownership before returning.
        CHKERR( PetscINCREF(xl.obj) ); CHKERR( PetscINCREF(xu.obj) )
        return (xl, xu)
1392
    def setBNCGType(self, cg_type: BNCGType) -> None:
        """Set the type of the BNCG solver.

        Collective.

        Parameters
        ----------
        cg_type
            The BNCG solver type.

        See Also
        --------
        getBNCGType, petsc.TaoBNCGSetType

        """
        cdef PetscTAOBNCGType ctype = cg_type
        CHKERR( TaoBNCGSetType(self.tao, ctype) )
1405
    def getBNCGType(self) -> BNCGType:
        """Return the type of the BNCG solver.

        Not collective.

        See Also
        --------
        setBNCGType, petsc.TaoBNCGGetType

        """
        # Placeholder initial value; overwritten by the library call.
        cdef PetscTAOBNCGType cg_type = TAO_BNCG_SSML_BFGS
        CHKERR( TaoBNCGGetType(self.tao, &cg_type) )
        return cg_type
1419
    def setIterationNumber(self, its: int) -> None:
        """Set the current iteration number.

        Collective.

        Parameters
        ----------
        its
            The iteration number to store in the solver.

        See Also
        --------
        getIterationNumber, petsc.TaoSetIterationNumber

        """
        cdef PetscInt ival = asInt(its)
        CHKERR( TaoSetIterationNumber(self.tao, ival) )
1432
    def getIterationNumber(self) -> int:
        """Return the current iteration number.

        Not collective.

        See Also
        --------
        setIterationNumber, petsc.TaoGetIterationNumber

        """
        # Placeholder initial value; overwritten by the library call.
        cdef PetscInt its=0
        CHKERR( TaoGetIterationNumber(self.tao, &its) )
        return toInt(its)
1446
    def getObjectiveValue(self) -> float:
        """Return the current value of the objective function.

        Not collective.

        See Also
        --------
        setObjective, petsc.TaoGetSolutionStatus

        """
        cdef PetscReal fval=0
        # Only the objective value slot is requested; the rest are NULL.
        CHKERR( TaoGetSolutionStatus(self.tao, NULL, &fval, NULL, NULL, NULL, NULL) )
        return toReal(fval)
1460
    # Backward-compatibility alias for getObjectiveValue.
    getFunctionValue = getObjectiveValue
1462
    def getConvergedReason(self) -> ConvergedReason:
        """Return the reason for the solver convergence.

        Not collective.

        See Also
        --------
        petsc.TaoGetConvergedReason

        """
        # NOTE(review): duplicate of the earlier `getConvergedReason`
        # definition in this class; this later one takes effect.
        cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING
        CHKERR( TaoGetConvergedReason(self.tao, &reason) )
        return reason
1476
    def getSolutionNorm(self) -> tuple[float, float, float]:
        """Return the objective function value and the norms of gradient and constraints.

        Not collective.

        Returns
        -------
        f : float
            Current value of the objective function.
        res : float
            Current value of the residual (gradient) norm.
        cnorm : float
            Current value of the constrains norm.

        See Also
        --------
        getSolutionStatus, petsc.TaoGetSolutionStatus

        """
        cdef PetscReal gnorm=0
        cdef PetscReal cnorm=0
        cdef PetscReal fval=0
        # Iteration count, step length and reason slots are not requested.
        CHKERR( TaoGetSolutionStatus(self.tao, NULL, &fval, &gnorm, &cnorm, NULL, NULL) )
        return (toReal(fval), toReal(gnorm), toReal(cnorm))
1501
    def getSolutionStatus(self) -> tuple[int, float, float, float, float, ConvergedReason]:
        """Return the solution status.

        Not collective.

        Returns
        -------
        its : int
            Current number of iterations.
        f : float
            Current value of the objective function.
        res : float
            Current value of the residual norm.
        cnorm : float
            Current value of the constrains norm.
        step : float
            Current value of the step.
        reason : ConvergedReason
            Current value of converged reason.

        See Also
        --------
        petsc.TaoGetSolutionStatus

        """
        cdef PetscInt its=0
        cdef PetscReal fval=0, gnorm=0, cnorm=0, xdiff=0
        cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING
        # All statistics are fetched in a single library call.
        CHKERR( TaoGetSolutionStatus(self.tao, &its,
                                     &fval, &gnorm, &cnorm, &xdiff,
                                     &reason) )
        return (toInt(its), toReal(fval),
                toReal(gnorm), toReal(cnorm),
                toReal(xdiff), reason)
1536
    def getKSP(self) -> KSP:
        """Return the linear solver used by the nonlinear solver.

        Not collective.

        See Also
        --------
        petsc.TaoGetKSP

        """
        cdef KSP ksp = KSP()
        CHKERR( TaoGetKSP(self.tao, &ksp.ksp) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(ksp.obj) )
        return ksp
1551
1552    # BRGN routines
1553
    def getBRGNSubsolver(self) -> TAO:
        """Return the subsolver inside the BRGN solver.

        Not collective.

        See Also
        --------
        petsc.TaoBRGNGetSubsolver

        """
        cdef TAO subsolver = TAO()
        CHKERR( TaoBRGNGetSubsolver(self.tao, &subsolver.tao) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(subsolver.obj) )
        return subsolver
1568
    def setBRGNRegularizerObjectiveGradient(self, objgrad, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback to compute the regularizer objective and gradient.

        Logically collective.

        Parameters
        ----------
        objgrad
            The callback computing the regularizer objective and gradient.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoBRGNSetRegularizerObjectiveAndGradientRoutine

        """
        if args is None: args = ()
        if kargs is None: kargs = {}
        context = (objgrad, args, kargs)
        # Store the context on this object so the tuple passed to C by
        # pointer stays alive for the lifetime of the solver.
        self.set_attr("__brgnregobjgrad__", context)
        CHKERR( TaoBRGNSetRegularizerObjectiveAndGradientRoutine(self.tao, TAO_BRGNRegObjGrad, <void*>context) )
1584
    def setBRGNRegularizerHessian(self, hessian, Mat H=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback to compute the regularizer Hessian.

        Logically collective.

        Parameters
        ----------
        hessian
            The callback computing the regularizer Hessian.
        H
            The matrix to store the Hessian, if any.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoBRGNSetRegularizerHessianRoutine

        """
        cdef PetscMat Hmat = NULL
        if H is not None: Hmat = H.mat
        if args is None: args = ()
        if kargs is None: kargs = {}
        context = (hessian, args, kargs)
        # Keep the context alive: it is passed to C as a raw pointer.
        self.set_attr("__brgnreghessian__", context)
        CHKERR( TaoBRGNSetRegularizerHessianRoutine(self.tao, Hmat, TAO_BRGNRegHessian, <void*>context) )
1602
    def setBRGNRegularizerWeight(self, weight: float) -> None:
        """Set the regularizer weight.

        Collective.

        Parameters
        ----------
        weight
            The regularizer weight.

        """
        cdef PetscReal cweight = asReal(weight)
        CHKERR( TaoBRGNSetRegularizerWeight(self.tao, cweight) )
1611
    def setBRGNSmoothL1Epsilon(self, epsilon: float) -> None:
        """Set the smooth L1 epsilon.

        Collective.

        Parameters
        ----------
        epsilon
            The smooth L1 epsilon value.

        See Also
        --------
        petsc.TaoBRGNSetL1SmoothEpsilon

        """
        cdef PetscReal ceps = asReal(epsilon)
        CHKERR( TaoBRGNSetL1SmoothEpsilon(self.tao, ceps) )
1624
    def setBRGNDictionaryMatrix(self, Mat D) -> None:
        """Set the dictionary matrix.

        Collective.

        Parameters
        ----------
        D
            The dictionary matrix.

        See Also
        --------
        petsc.TaoBRGNSetDictionaryMatrix

        """
        CHKERR( TaoBRGNSetDictionaryMatrix(self.tao, D.mat) )
1636
    def getBRGNDampingVector(self) -> Vec:
        """Return the damping vector.

        Not collective.

        """
        #FIXME
        #See Also
        #--------
        #petsc.TaoBRGNGetDampingVector
        cdef Vec damp = Vec()
        CHKERR( TaoBRGNGetDampingVector(self.tao, &damp.vec) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(damp.obj) )
        return damp
1651
    def createPython(self, context: Any = None, comm: Comm | None = None) -> Self:
        """Create an optimization solver of Python type.

        Collective.

        Parameters
        ----------
        context
            An instance of the Python class implementing the required methods.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc_python_tao, setType, setPythonContext, Type.PYTHON

        """
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscTAO tao = NULL
        CHKERR( TaoCreate(ccomm, &tao) )
        # Release any previously held solver before adopting the new one.
        CHKERR( PetscCLEAR(self.obj) ); self.tao = tao
        CHKERR( TaoSetType(self.tao, TAOPYTHON) )
        CHKERR( TaoPythonSetContext(self.tao, <void*>context) )
        return self
1676
    def setPythonContext(self, context: Any) -> None:
        """Set the instance of the class implementing the required Python methods.

        Not collective.

        Parameters
        ----------
        context
            An instance of the Python class implementing the required methods.

        See Also
        --------
        petsc_python_tao, getPythonContext

        """
        CHKERR( TaoPythonSetContext(self.tao, <void*>context) )
1688
    def getPythonContext(self) -> Any:
        """Return the instance of the class implementing the required Python methods.

        Not collective.

        See Also
        --------
        petsc_python_tao, setPythonContext

        """
        cdef void *context = NULL
        CHKERR( TaoPythonGetContext(self.tao, &context) )
        # A NULL pointer means no context has been set.
        if context == NULL: return None
        else: return <object> context
1703
    def setPythonType(self, py_type: str) -> None:
        """Set the fully qualified Python name of the class to be used.

        Collective.

        Parameters
        ----------
        py_type
            The fully qualified Python name, e.g. ``"module.ClassName"``.

        See Also
        --------
        petsc_python_tao, setPythonContext, getPythonType
        petsc.TaoPythonSetType

        """
        cdef const char *cval = NULL
        # Keep the bytes object alive while the C call reads from it.
        py_type = str2bytes(py_type, &cval)
        CHKERR( TaoPythonSetType(self.tao, cval) )
1718
    def getPythonType(self) -> str:
        """Return the fully qualified Python name of the class used by the solver.

        Not collective.

        See Also
        --------
        petsc_python_tao, setPythonContext, setPythonType
        petsc.TaoPythonGetType

        """
        cdef const char *cval = NULL
        CHKERR( TaoPythonGetType(self.tao, &cval) )
        return bytes2str(cval)
1733
    def getLineSearch(self) -> TAOLineSearch:
        """Return the TAO Line Search object.

        Not collective.

        See Also
        --------
        petsc.TaoGetLineSearch

        """
        cdef TAOLineSearch ls = TAOLineSearch()
        CHKERR( TaoGetLineSearch(self.tao, &ls.taols) )
        # Borrowed reference; take ownership before returning.
        CHKERR( PetscINCREF(ls.obj) )
        return ls
1748
    # --- backward compatibility ---

    # Old name for setSolution, kept so existing callers continue to work.
    setInitial = setSolution
1752
1753    # --- application context ---
1754
    property appctx:
        """Application context (user data attached to the solver)."""
        def __get__(self) -> Any:
            return self.getAppCtx()
        def __set__(self, value: Any):
            self.setAppCtx(value)
1761
1762    # --- linear solver ---
1763
    property ksp:
        """Linear solver (read-only)."""
        def __get__(self) -> KSP:
            return self.getKSP()
1768
1769    # --- tolerances ---
1770
    # FIXME: tolerances all broken
    property ftol:
        """Broken.

        NOTE(review): this property references get/setFunctionTolerances,
        which are not visible in this class — confirm they exist before use.
        """
        def __get__(self) -> Any:
            return self.getFunctionTolerances()
        def __set__(self, value):
            if isinstance(value, (tuple, list)):
                self.setFunctionTolerances(*value)
            elif isinstance(value, dict):
                self.setFunctionTolerances(**value)
            else:
                raise TypeError("expecting tuple/list or dict")
1783
    property gtol:
        """Broken.

        NOTE(review): the setter below invokes the *getter*
        ``getGradientTolerances`` with arguments, which cannot work; the
        matching setter is not visible in this chunk, so the code is left
        unchanged — fix by calling the proper set method.
        """
        def __get__(self) -> Any:
            return self.getGradientTolerances()
        def __set__(self, value):
            if isinstance(value, (tuple, list)):
                self.getGradientTolerances(*value)
            elif isinstance(value, dict):
                self.getGradientTolerances(**value)
            else:
                raise TypeError("expecting tuple/list or dict")
1795
1796    property ctol:
1797        """Broken."""
1798        def __get__(self) -> Any:
1799            return self.getConstraintTolerances()
1800        def __set__(self, value):
1801            if isinstance(value, (tuple, list)):
1802                self.getConstraintTolerances(*value)
1803            elif isinstance(value, dict):
1804                self.getConstraintTolerances(**value)
1805            else:
1806                raise TypeError("expecting tuple/list or dict")
1807
1808    # --- iteration ---
1809
    property its:
        """Number of iterations completed (read-only)."""
        def __get__(self) -> int:
            return self.getIterationNumber()
1814
    property gnorm:
        """Gradient norm (read-only)."""
        def __get__(self) -> float:
            # Second element of (f, gnorm, cnorm) from getSolutionNorm.
            return self.getSolutionNorm()[1]
1819
    property cnorm:
        """Constraints norm (read-only)."""
        def __get__(self) -> float:
            # Third element of (f, gnorm, cnorm) from getSolutionNorm.
            return self.getSolutionNorm()[2]
1824
    property solution:
        """Solution vector (read-only)."""
        def __get__(self) -> Vec:
            return self.getSolution()
1829
    property objective:
        """Objective value (read-only)."""
        def __get__(self) -> float:
            return self.getObjectiveValue()
1834
    property function:
        """Objective value (alias of `objective`, read-only)."""
        def __get__(self) -> float:
            return self.getFunctionValue()
1839
    property gradient:
        """Gradient vector (read-only)."""
        def __get__(self) -> Vec:
            # getGradient returns a pair; only the vector is exposed here.
            return self.getGradient()[0]
1844
1845    # --- convergence ---
1846
    property reason:
        """Converged reason (read-only)."""
        def __get__(self) -> ConvergedReason:
            return self.getConvergedReason()
1851
    property iterating:
        """Boolean indicating if the solver has not converged yet."""
        def __get__(self) -> bool:
            # reason == 0 corresponds to TAO_CONTINUE_ITERATING.
            return self.reason == 0
1856
    property converged:
        """Boolean indicating if the solver has converged."""
        def __get__(self) -> bool:
            # Converged reasons are positive by convention.
            return self.reason > 0
1861
    property diverged:
        """Boolean indicating if the solver has failed."""
        def __get__(self) -> bool:
            # Diverged reasons are negative by convention.
            return self.reason < 0
1866
1867# --------------------------------------------------------------------
1868
# Unbind the helper classes: they were only needed to populate the
# class-level attributes of TAO above and are not part of the public API.
del TAOType
del TAOConvergedReason
del TAOBNCGType
1872
1873# --------------------------------------------------------------------
1874
class TAOLineSearchType:
    """TAO Line Search Types.

    String constants naming the available line search implementations.
    """
    UNIT        = S_(TAOLINESEARCHUNIT)
    ARMIJO      = S_(TAOLINESEARCHARMIJO)
    MORETHUENTE = S_(TAOLINESEARCHMT)
    IPM         = S_(TAOLINESEARCHIPM)
    OWARMIJO    = S_(TAOLINESEARCHOWARMIJO)
    GPCG        = S_(TAOLINESEARCHGPCG)
1883
class TAOLineSearchConvergedReason:
    """TAO Line Search Termination Reasons.

    Groups: iterating, failed, succeeded, and halted states.
    """
    # iterating
    CONTINUE_SEARCH       = TAOLINESEARCH_CONTINUE_ITERATING
    # failed
    FAILED_INFORNAN       = TAOLINESEARCH_FAILED_INFORNAN      # inf or NaN in user function
    FAILED_BADPARAMETER   = TAOLINESEARCH_FAILED_BADPARAMETER  # negative value set as parameter
    FAILED_ASCENT         = TAOLINESEARCH_FAILED_ASCENT        # search direction is not a descent direction
    # succeeded
    SUCCESS               = TAOLINESEARCH_SUCCESS              # found step length
    SUCCESS_USER          = TAOLINESEARCH_SUCCESS_USER         # user-defined success criteria reached
    # halted
    HALTED_OTHER          = TAOLINESEARCH_HALTED_OTHER         # stopped search with unknown reason
    HALTED_MAXFCN         = TAOLINESEARCH_HALTED_MAXFCN        # maximum function evaluations reached
    HALTED_UPPERBOUND     = TAOLINESEARCH_HALTED_UPPERBOUND    # stopped at upper bound
    HALTED_LOWERBOUND     = TAOLINESEARCH_HALTED_LOWERBOUND    # stopped at lower bound
    HALTED_RTOL           = TAOLINESEARCH_HALTED_RTOL          # range of uncertainty is below tolerance
    HALTED_USER           = TAOLINESEARCH_HALTED_USER          # user-defined halt criteria reached
1902
1903# --------------------------------------------------------------------
1904
1905cdef class TAOLineSearch(Object):
1906    """TAO Line Search."""
1907
1908    Type   = TAOLineSearchType
1909    Reason = TAOLineSearchConvergedReason
1910
1911    def __cinit__(self):
1912         self.obj = <PetscObject*> &self.taols
1913         self.taols = NULL
1914
    def view(self, Viewer viewer=None) -> None:
        """View the linesearch object.

        Collective.

        Parameters
        ----------
        viewer
            A `Viewer` instance or `None` for the default viewer.

        See Also
        --------
        petsc.TaoLineSearchView

        """
        # A NULL viewer selects the library's default viewer.
        cdef PetscViewer vwr = NULL
        if viewer is not None: vwr = viewer.vwr
        CHKERR( TaoLineSearchView(self.taols, vwr) )
1933
    def destroy(self) -> Self:
        """Destroy the linesearch object.

        Collective.

        See Also
        --------
        petsc.TaoLineSearchDestroy

        """
        # The C call zeroes self.taols after destroying the object.
        CHKERR( TaoLineSearchDestroy(&self.taols) )
        return self
1946
1947    def create(self, comm=None) -> Self:
1948        """Create a TAO linesearch.
1949
1950        Collective.
1951
1952        Parameters
1953        ----------
1954        comm
1955            MPI communicator, defaults to `Sys.getDefaultComm`.
1956
1957        See Also
1958        --------
1959        Sys.getDefaultComm, petsc.TaoLineSearchCreate
1960
1961        """
1962        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
1963        cdef PetscTAOLineSearch newtaols = NULL
1964        CHKERR( TaoLineSearchCreate(ccomm, &newtaols) )
1965        CHKERR( PetscCLEAR(self.obj) ); self.taols = newtaols
1966        return self
1967
1968    def setType(self, ls_type: Type | str) -> None:
1969        """Set the type of the linesearch.
1970
1971        Logically collective.
1972
1973        Parameters
1974        ----------
1975        ls_type
1976            The type of the solver.
1977
1978        See Also
1979        --------
1980        getType, petsc.TaoLineSearchSetType
1981
1982        """
1983        cdef PetscTAOLineSearchType ctype = NULL
1984        ls_type = str2bytes(ls_type, &ctype)
1985        CHKERR( TaoLineSearchSetType(self.taols, ctype) )
1986
1987    def getType(self) -> str:
1988        """Return the type of the linesearch.
1989
1990        Not collective.
1991
1992        See Also
1993        --------
1994        setType, petsc.TaoLineSearchGetType
1995
1996        """
1997        cdef PetscTAOLineSearchType ctype = NULL
1998        CHKERR( TaoLineSearchGetType(self.taols, &ctype) )
1999        return bytes2str(ctype)
2000
2001    def setFromOptions(self) -> None:
2002        """Configure the linesearch from the options database.
2003
2004        Collective.
2005
2006        See Also
2007        --------
2008        petsc_options, petsc.TaoLineSearchSetFromOptions
2009
2010        """
2011        CHKERR( TaoLineSearchSetFromOptions(self.taols) )
2012
2013    def setUp(self) -> None:
2014        """Set up the internal data structures for using the linesearch.
2015
2016        Collective.
2017
2018        See Also
2019        --------
2020        petsc.TaoLineSearchSetUp
2021
2022        """
2023        CHKERR( TaoLineSearchSetUp(self.taols) )
2024
2025    def setOptionsPrefix(self, prefix) -> None:
2026        """Set the prefix used for searching for options in the database.
2027
2028        Logically collective.
2029
2030        See Also
2031        --------
2032        petsc_options, petsc.TaoLineSearchSetOptionsPrefix
2033
2034        """
2035        cdef const char *cprefix = NULL
2036        prefix = str2bytes(prefix, &cprefix)
2037        CHKERR( TaoLineSearchSetOptionsPrefix(self.taols, cprefix) )
2038
2039    def getOptionsPrefix(self) -> str:
2040        """Return the prefix used for searching for options in the database.
2041
2042        Not collective.
2043
2044        See Also
2045        --------
2046        petsc_options, setOptionsPrefix, petsc.TaoLineSearchGetOptionsPrefix
2047
2048        """
2049        cdef const char *prefix = NULL
2050        CHKERR( TaoLineSearchGetOptionsPrefix(self.taols, &prefix) )
2051        return bytes2str(prefix)
2052
2053    def setObjective(self, objective : TAOLSObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2054        """Set the objective function evaluation callback.
2055
2056        Logically collective.
2057
2058        Parameters
2059        ----------
2060        objective
2061            The objective function callback.
2062        args
2063            Positional arguments for the callback.
2064        kargs
2065            Keyword arguments for the callback.
2066
2067        See Also
2068        --------
2069        setGradient, setObjectiveGradient
2070        petsc.TaoLineSearchSetObjectiveRoutine
2071
2072        """
2073        CHKERR( TaoLineSearchSetObjectiveRoutine(self.taols, TAOLS_Objective, NULL) )
2074        if args is None: args = ()
2075        if kargs is None: kargs = {}
2076        self.set_attr("__objective__", (objective, args, kargs))
2077
2078    def setGradient(self, gradient: TAOLSGradientFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2079        """Set the gradient evaluation callback.
2080
2081        Logically collective.
2082
2083        Parameters
2084        ----------
2085        gradient
2086            The gradient callback.
2087        g
2088            The vector to store the gradient.
2089        args
2090            Positional arguments for the callback.
2091        kargs
2092            Keyword arguments for the callback.
2093
2094        See Also
2095        --------
2096        setObjective, setObjectiveGradient, setHessian
2097        petsc.TaoLineSearchSetGradientRoutine
2098
2099        """
2100        CHKERR( TaoLineSearchSetGradientRoutine(self.taols, TAOLS_Gradient, NULL) )
2101        if args is None: args = ()
2102        if kargs is None: kargs = {}
2103        self.set_attr("__gradient__", (gradient, args, kargs))
2104
2105    def setObjectiveGradient(self, objgrad: TAOLSObjectiveGradientFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2106        """Set the objective function and gradient evaluation callback.
2107
2108        Logically collective.
2109
2110        Parameters
2111        ----------
2112        objgrad
2113            The objective function and gradient callback.
2114        g
2115            The vector to store the gradient.
2116        args
2117            Positional arguments for the callback.
2118        kargs
2119            Keyword arguments for the callback.
2120
2121        See Also
2122        --------
2123        setObjective, setGradient, setHessian, getObjectiveAndGradient
2124        petsc.TaoLineSearchSetObjectiveAndGradientRoutine
2125
2126        """
2127        CHKERR( TaoLineSearchSetObjectiveAndGradientRoutine(self.taols, TAOLS_ObjGrad, NULL) )
2128        if args is None: args = ()
2129        if kargs is None: kargs = {}
2130        self.set_attr("__objgrad__", (objgrad, args, kargs))
2131
2132    def useTAORoutine(self, TAO tao) -> None:
2133        """Use the objective and gradient evaluation routines from the given Tao object.
2134
2135        Logically collective.
2136
2137        See Also
2138        --------
2139        petsc.TaoLineSearchUseTaoRoutines
2140
2141        """
2142        CHKERR( TaoLineSearchUseTaoRoutines(self.taols, tao.tao) )
2143
2144    def apply(self, Vec x, Vec g, Vec s) -> tuple[float, float, str]:
2145        """Performs a line-search in a given step direction.
2146
2147        Collective.
2148
2149        See Also
2150        --------
2151        petsc.TaoLineSearchApply
2152
2153        """
2154        cdef PetscReal f = 0
2155        cdef PetscReal steplen = 0
2156        cdef PetscTAOLineSearchConvergedReason reason = TAOLINESEARCH_CONTINUE_ITERATING
2157        CHKERR( TaoLineSearchApply(self.taols,x.vec,&f,g.vec,s.vec,&steplen,&reason))
2158        return (toReal(f), toReal(steplen), reason)
2159
2160# --------------------------------------------------------------------
2161
# These enumeration classes remain reachable as TAOLineSearch.Type and
# TAOLineSearch.Reason; remove the module-level names to keep the
# public module namespace clean.
del TAOLineSearchType
del TAOLineSearchConvergedReason
2164