xref: /petsc/src/binding/petsc4py/src/petsc4py/PETSc/TAO.pyx (revision 77e5a1f9fcfc1cfeb1010ff17041dc1a220acc3c)
1# --------------------------------------------------------------------
2
class TAOType:
    """TAO solver type.

    See Also
    --------
    petsc.TaoType

    """
    # Each attribute is the string name of a PETSc TAO algorithm;
    # pass one of these to `TAO.setType` to select the solver.
    LMVM     = S_(TAOLMVM)
    NLS      = S_(TAONLS)
    NTR      = S_(TAONTR)
    NTL      = S_(TAONTL)
    CG       = S_(TAOCG)
    TRON     = S_(TAOTRON)
    OWLQN    = S_(TAOOWLQN)
    BMRM     = S_(TAOBMRM)
    BLMVM    = S_(TAOBLMVM)
    BQNLS    = S_(TAOBQNLS)
    BNCG     = S_(TAOBNCG)
    BNLS     = S_(TAOBNLS)
    BNTR     = S_(TAOBNTR)
    BNTL     = S_(TAOBNTL)
    BQNKLS   = S_(TAOBQNKLS)
    BQNKTR   = S_(TAOBQNKTR)
    BQNKTL   = S_(TAOBQNKTL)
    BQPIP    = S_(TAOBQPIP)
    GPCG     = S_(TAOGPCG)
    NM       = S_(TAONM)
    POUNDERS = S_(TAOPOUNDERS)
    BRGN     = S_(TAOBRGN)
    LCL      = S_(TAOLCL)
    SSILS    = S_(TAOSSILS)
    SSFLS    = S_(TAOSSFLS)
    ASILS    = S_(TAOASILS)
    ASFLS    = S_(TAOASFLS)
    IPM      = S_(TAOIPM)
    PDIPM    = S_(TAOPDIPM)
    SHELL    = S_(TAOSHELL)
    ADMM     = S_(TAOADMM)
    ALMM     = S_(TAOALMM)
    PYTHON   = S_(TAOPYTHON)
44
45
class TAOConvergedReason:
    """TAO solver termination reason.

    Positive values indicate convergence, negative values divergence,
    and zero that the solver is still iterating.

    See Also
    --------
    petsc.TaoConvergedReason

    """
    # iterating
    CONTINUE_ITERATING    = TAO_CONTINUE_ITERATING    # iterating
    CONVERGED_ITERATING   = TAO_CONTINUE_ITERATING    # iterating (alias)
    ITERATING             = TAO_CONTINUE_ITERATING    # iterating (alias)
    # converged
    CONVERGED_GATOL       = TAO_CONVERGED_GATOL       # ||g(X)|| < gatol
    CONVERGED_GRTOL       = TAO_CONVERGED_GRTOL       # ||g(X)||/f(X)  < grtol
    CONVERGED_GTTOL       = TAO_CONVERGED_GTTOL       # ||g(X)||/||g(X0)|| < gttol
    CONVERGED_STEPTOL     = TAO_CONVERGED_STEPTOL     # small step size
    CONVERGED_MINF        = TAO_CONVERGED_MINF        # f(X) < F_min
    CONVERGED_USER        = TAO_CONVERGED_USER        # user defined
    # diverged
    DIVERGED_MAXITS       = TAO_DIVERGED_MAXITS       # maximum number of iterations reached
    DIVERGED_NAN          = TAO_DIVERGED_NAN          # NaN or Inf encountered
    DIVERGED_MAXFCN       = TAO_DIVERGED_MAXFCN       # maximum number of function evaluations reached
    DIVERGED_LS_FAILURE   = TAO_DIVERGED_LS_FAILURE   # line search failure
    DIVERGED_TR_REDUCTION = TAO_DIVERGED_TR_REDUCTION # trust region reduction failed
    DIVERGED_USER         = TAO_DIVERGED_USER         # user defined
72
73
class TAOBNCGType:
    """TAO Bound Constrained Conjugate Gradient (BNCG) Update Type."""
    # Conjugate-gradient update formulas; abbreviations follow the
    # standard nonlinear-CG literature (see the PETSc TAOBNCG docs).
    GD         = TAO_BNCG_GD         # gradient descent
    PCGD       = TAO_BNCG_PCGD       # preconditioned gradient descent
    HS         = TAO_BNCG_HS         # Hestenes-Stiefel
    FR         = TAO_BNCG_FR         # Fletcher-Reeves
    PRP        = TAO_BNCG_PRP        # Polak-Ribiere-Polyak
    PRP_PLUS   = TAO_BNCG_PRP_PLUS   # Polak-Ribiere-Polyak "plus"
    DY         = TAO_BNCG_DY         # Dai-Yuan
    HZ         = TAO_BNCG_HZ         # Hager-Zhang
    DK         = TAO_BNCG_DK         # Dai-Kou
    KD         = TAO_BNCG_KD         # Kou-Dai
    SSML_BFGS  = TAO_BNCG_SSML_BFGS  # self-scaling memoryless BFGS
    SSML_DFP   = TAO_BNCG_SSML_DFP   # self-scaling memoryless DFP
    SSML_BRDN  = TAO_BNCG_SSML_BRDN  # self-scaling memoryless Broyden
89# --------------------------------------------------------------------
90
91
cdef class TAO(Object):
    """Optimization solver.

    TAO is described in the `PETSc manual <petsc:manual/tao>`.

    See Also
    --------
    petsc.Tao

    """

    # Expose the enumeration helper classes as class attributes.
    Type = TAOType
    ConvergedReason = TAOConvergedReason
    BNCGType = TAOBNCGType
    # FIXME backward compatibility
    # Deprecated alias kept so old code using ``TAO.Reason`` still works.
    Reason = TAOConvergedReason
108
    def __cinit__(self):
        # Point the generic PETSc object pointer at this solver's TAO
        # handle so the base `Object` machinery manages it, then start
        # with a NULL (uncreated) handle.
        self.obj = <PetscObject*> &self.tao
        self.tao = NULL
112
113    def view(self, Viewer viewer=None) -> None:
114        """View the solver.
115
116        Collective.
117
118        Parameters
119        ----------
120        viewer
121            A `Viewer` instance or `None` for the default viewer.
122
123        See Also
124        --------
125        petsc.TaoView
126
127        """
128        cdef PetscViewer vwr = NULL
129        if viewer is not None: vwr = viewer.vwr
130        CHKERR(TaoView(self.tao, vwr))
131
    def destroy(self) -> Self:
        """Destroy the solver.

        Collective.

        See Also
        --------
        petsc.TaoDestroy

        """
        # TaoDestroy resets self.tao to NULL; returning self allows
        # call chaining.
        CHKERR(TaoDestroy(&self.tao))
        return self
144
145    def create(self, comm: Comm | None = None) -> Self:
146        """Create a TAO solver.
147
148        Collective.
149
150        Parameters
151        ----------
152        comm
153            MPI communicator, defaults to `Sys.getDefaultComm`.
154
155        See Also
156        --------
157        Sys.getDefaultComm, petsc.TaoCreate
158
159        """
160        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
161        cdef PetscTAO newtao = NULL
162        CHKERR(TaoCreate(ccomm, &newtao))
163        CHKERR(PetscCLEAR(self.obj)); self.tao = newtao
164        return self
165
166    def setType(self, tao_type: Type | str) -> None:
167        """Set the type of the solver.
168
169        Logically collective.
170
171        Parameters
172        ----------
173        tao_type
174            The type of the solver.
175
176        See Also
177        --------
178        getType, petsc.TaoSetType
179
180        """
181        cdef PetscTAOType ctype = NULL
182        tao_type = str2bytes(tao_type, &ctype)
183        CHKERR(TaoSetType(self.tao, ctype))
184
185    def getType(self) -> str:
186        """Return the type of the solver.
187
188        Not collective.
189
190        See Also
191        --------
192        setType, petsc.TaoGetType
193
194        """
195        cdef PetscTAOType ctype = NULL
196        CHKERR(TaoGetType(self.tao, &ctype))
197        return bytes2str(ctype)
198
    def setOptionsPrefix(self, prefix: str | None) -> None:
        """Set the prefix used for searching for options in the database.

        Logically collective.

        Parameters
        ----------
        prefix
            The options prefix.

        See Also
        --------
        petsc_options, petsc.TaoSetOptionsPrefix

        """
        cdef const char *cprefix = NULL
        # str2bytes keeps the encoded bytes alive via the rebound name.
        prefix = str2bytes(prefix, &cprefix)
        CHKERR(TaoSetOptionsPrefix(self.tao, cprefix))
212
    def appendOptionsPrefix(self, prefix: str | None) -> None:
        """Append to the prefix used for searching for options in the database.

        Logically collective.

        Parameters
        ----------
        prefix
            The prefix to append to the current options prefix.

        See Also
        --------
        petsc_options, setOptionsPrefix, petsc.TaoAppendOptionsPrefix

        """
        cdef const char *cprefix = NULL
        # str2bytes keeps the encoded bytes alive via the rebound name.
        prefix = str2bytes(prefix, &cprefix)
        CHKERR(TaoAppendOptionsPrefix(self.tao, cprefix))
226
    def getOptionsPrefix(self) -> str:
        """Return the prefix used for searching for options in the database.

        Not collective.

        See Also
        --------
        petsc_options, setOptionsPrefix, petsc.TaoGetOptionsPrefix

        """
        cdef const char *prefix = NULL
        CHKERR(TaoGetOptionsPrefix(self.tao, &prefix))
        # Copy the C string into a Python str (the pointer is not freed here).
        return bytes2str(prefix)
240
    def setFromOptions(self) -> None:
        """Configure the solver from the options database.

        Collective.

        See Also
        --------
        petsc_options, petsc.TaoSetFromOptions

        """
        # Applies command-line/database options to this solver instance.
        CHKERR(TaoSetFromOptions(self.tao))
252
    def setUp(self) -> None:
        """Set up the internal data structures for using the solver.

        Collective.

        See Also
        --------
        petsc.TaoSetUp

        """
        # Allocates the solver's internal work data structures.
        CHKERR(TaoSetUp(self.tao))
264
265    #
266
    def setInitialTrustRegionRadius(self, radius: float) -> None:
        """Set the initial trust region radius.

        Collective.

        Parameters
        ----------
        radius
            The initial trust region radius.

        See Also
        --------
        petsc.TaoSetInitialTrustRegionRadius

        """
        cdef PetscReal cradius = asReal(radius)
        CHKERR(TaoSetInitialTrustRegionRadius(self.tao, cradius))
279
280    # --------------
281
    def setAppCtx(self, appctx: Any) -> None:
        """Set the application context."""
        # Stored as a Python attribute on the object; see `getAppCtx`.
        self.set_attr("__appctx__", appctx)
285
    def getAppCtx(self) -> Any:
        """Return the application context."""
        # The context previously stored with `setAppCtx`, if any.
        return self.get_attr("__appctx__")
289
    def setSolution(self, Vec x) -> None:
        """Set the vector used to store the solution.

        Collective.

        Parameters
        ----------
        x
            The solution vector.

        See Also
        --------
        getSolution, petsc.TaoSetSolution

        """
        CHKERR(TaoSetSolution(self.tao, x.vec))
301
302    def setObjective(self, objective: TAOObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
303        """Set the objective function evaluation callback.
304
305        Logically collective.
306
307        Parameters
308        ----------
309        objective
310            The objective function callback.
311        args
312            Positional arguments for the callback.
313        kargs
314            Keyword arguments for the callback.
315
316        See Also
317        --------
318        setGradient, setObjectiveGradient, petsc.TaoSetObjective
319
320        """
321        if args is None: args = ()
322        if kargs is None: kargs = {}
323        context = (objective, args, kargs)
324        self.set_attr("__objective__", context)
325        CHKERR(TaoSetObjective(self.tao, TAO_Objective, <void*>context))
326
327    def setResidual(self, residual: TAOResidualFunction, Vec R, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
328        """Set the residual evaluation callback for least-squares applications.
329
330        Logically collective.
331
332        Parameters
333        ----------
334        residual
335            The residual callback.
336        R
337            The vector to store the residual.
338        args
339            Positional arguments for the callback.
340        kargs
341            Keyword arguments for the callback.
342
343        See Also
344        --------
345        setJacobianResidual, petsc.TaoSetResidualRoutine
346
347        """
348        if args is None: args = ()
349        if kargs is None: kargs = {}
350        context = (residual, args, kargs)
351        self.set_attr("__residual__", context)
352        CHKERR(TaoSetResidualRoutine(self.tao, R.vec, TAO_Residual, <void*>context))
353
354    def setJacobianResidual(self, jacobian: TAOJacobianResidualFunction, Mat J=None, Mat P=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
355        """Set the callback to compute the least-squares residual Jacobian.
356
357        Logically collective.
358
359        Parameters
360        ----------
361        jacobian
362            The Jacobian callback.
363        J
364            The matrix to store the Jacobian.
365        P
366            The matrix to construct the preconditioner.
367        args
368            Positional arguments for the callback.
369        kargs
370            Keyword arguments for the callback.
371
372        See Also
373        --------
374        setResidual, petsc.TaoSetJacobianResidualRoutine
375
376        """
377        cdef PetscMat Jmat = NULL
378        if J is not None: Jmat = J.mat
379        cdef PetscMat Pmat = Jmat
380        if P is not None: Pmat = P.mat
381        if args is None: args = ()
382        if kargs is None: kargs = {}
383        context = (jacobian, args, kargs)
384        self.set_attr("__jacobian_residual__", context)
385        CHKERR(TaoSetJacobianResidualRoutine(self.tao, Jmat, Pmat, TAO_JacobianResidual, <void*>context))
386
387    def setGradient(self, gradient: TAOGradientFunction, Vec g=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
388        """Set the gradient evaluation callback.
389
390        Logically collective.
391
392        Parameters
393        ----------
394        gradient
395            The gradient callback.
396        g
397            The vector to store the gradient.
398        args
399            Positional arguments for the callback.
400        kargs
401            Keyword arguments for the callback.
402
403        See Also
404        --------
405        setObjective, setObjectiveGradient, setHessian, petsc.TaoSetGradient
406
407        """
408        cdef PetscVec gvec = NULL
409        if g is not None: gvec = g.vec
410        if args is None: args = ()
411        if kargs is None: kargs = {}
412        context = (gradient, args, kargs)
413        self.set_attr("__gradient__", context)
414        CHKERR(TaoSetGradient(self.tao, gvec, TAO_Gradient, <void*>context))
415
    def getGradient(self) -> tuple[Vec, TAOGradientFunction]:
        """Return the vector used to store the gradient and the evaluation callback.

        Not collective.

        See Also
        --------
        setGradient, setHessian, petsc.TaoGetGradient

        """
        cdef Vec vec = Vec()
        CHKERR(TaoGetGradient(self.tao, &vec.vec, NULL, NULL))
        # Bump the reference count so the returned Vec owns its handle.
        CHKERR(PetscINCREF(vec.obj))
        cdef object gradient = self.get_attr("__gradient__")
        return (vec, gradient)
431
432    def setObjectiveGradient(self, objgrad: TAOObjectiveGradientFunction, Vec g=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
433        """Set the objective function and gradient evaluation callback.
434
435        Logically collective.
436
437        Parameters
438        ----------
439        objgrad
440            The objective function and gradient callback.
441        g
442            The vector to store the gradient.
443        args
444            Positional arguments for the callback.
445        kargs
446            Keyword arguments for the callback.
447
448        See Also
449        --------
450        setObjective, setGradient, setHessian, getObjectiveAndGradient
451        petsc.TaoSetObjectiveAndGradient
452
453        """
454        cdef PetscVec gvec = NULL
455        if g is not None: gvec = g.vec
456        if args is None: args = ()
457        if kargs is None: kargs = {}
458        context = (objgrad, args, kargs)
459        self.set_attr("__objgrad__", context)
460        CHKERR(TaoSetObjectiveAndGradient(self.tao, gvec, TAO_ObjGrad, <void*>context))
461
    def getObjectiveAndGradient(self) -> tuple[Vec, TAOObjectiveGradientFunction]:
        """Return the vector used to store the gradient and the evaluation callback.

        Not collective.

        See Also
        --------
        setObjectiveGradient, petsc.TaoGetObjectiveAndGradient

        """
        cdef Vec vec = Vec()
        CHKERR(TaoGetObjectiveAndGradient(self.tao, &vec.vec, NULL, NULL))
        # Bump the reference count so the returned Vec owns its handle.
        CHKERR(PetscINCREF(vec.obj))
        cdef object objgrad = self.get_attr("__objgrad__")
        return (vec, objgrad)
477
478    def setVariableBounds(self, varbounds: tuple[Vec, Vec] | TAOVariableBoundsFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
479        """Set the upper and lower bounds for the optimization problem.
480
481        Logically collective.
482
483        Parameters
484        ----------
485        varbounds
486            Either a tuple of `Vec` or a `TAOVariableBoundsFunction` callback.
487        args
488            Positional arguments for the callback.
489        kargs
490            Keyword arguments for the callback.
491
492        See Also
493        --------
494        petsc.TaoSetVariableBounds, petsc.TaoSetVariableBoundsRoutine
495
496        """
497        cdef Vec xl = None, xu = None
498        if (isinstance(varbounds, list) or isinstance(varbounds, tuple)):
499            ol, ou = varbounds
500            xl = <Vec?> ol; xu = <Vec?> ou
501            CHKERR(TaoSetVariableBounds(self.tao, xl.vec, xu.vec))
502            return
503        if isinstance(varbounds, Vec): # FIXME
504            ol = varbounds; ou = args
505            xl = <Vec?> ol; xu = <Vec?> ou
506            CHKERR(TaoSetVariableBounds(self.tao, xl.vec, xu.vec))
507            return
508        if args is None: args = ()
509        if kargs is None: kargs = {}
510        context = (varbounds, args, kargs)
511        self.set_attr("__varbounds__", context)
512        CHKERR(TaoSetVariableBoundsRoutine(self.tao, TAO_VarBounds, <void*>context))
513
514    def setConstraints(self, constraints: TAOConstraintsFunction, Vec C=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
515        """Set the callback to compute constraints.
516
517        Logically collective.
518
519        Parameters
520        ----------
521        constraints
522            The callback.
523        C
524            The vector to hold the constraints.
525        args
526            Positional arguments for the callback.
527        kargs
528            Keyword arguments for the callback.
529
530        See Also
531        --------
532        petsc.TaoSetConstraintsRoutine
533
534        """
535        cdef PetscVec Cvec = NULL
536        if C is not None: Cvec = C.vec
537        if args is None: args = ()
538        if kargs is None: kargs = {}
539        context = (constraints, args, kargs)
540        self.set_attr("__constraints__", context)
541        CHKERR(TaoSetConstraintsRoutine(self.tao, Cvec, TAO_Constraints, <void*>context))
542
543    def setHessian(self, hessian: TAOHessianFunction, Mat H=None, Mat P=None,
544                   args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
545        """Set the callback to compute the Hessian matrix.
546
547        Logically collective.
548
549        Parameters
550        ----------
551        hessian
552            The Hessian callback.
553        H
554            The matrix to store the Hessian.
555        P
556            The matrix to construct the preconditioner.
557        args
558            Positional arguments for the callback.
559        kargs
560            Keyword arguments for the callback.
561
562        See Also
563        --------
564        getHessian, setObjective, setObjectiveGradient, setGradient
565        petsc.TaoSetHessian
566
567        """
568        cdef PetscMat Hmat = NULL
569        if H is not None: Hmat = H.mat
570        cdef PetscMat Pmat = Hmat
571        if P is not None: Pmat = P.mat
572        if args is None: args = ()
573        if kargs is None: kargs = {}
574        context = (hessian, args, kargs)
575        self.set_attr("__hessian__", context)
576        CHKERR(TaoSetHessian(self.tao, Hmat, Pmat, TAO_Hessian, <void*>context))
577
    def getHessian(self) -> tuple[Mat, Mat, TAOHessianFunction]:
        """Return the matrices used to store the Hessian and the evaluation callback.

        Not collective.

        See Also
        --------
        setHessian, petsc.TaoGetHessian

        """
        cdef Mat J = Mat()
        cdef Mat P = Mat()
        CHKERR(TaoGetHessian(self.tao, &J.mat, &P.mat, NULL, NULL))
        # Bump reference counts so the returned Mats own their handles.
        CHKERR(PetscINCREF(J.obj))
        CHKERR(PetscINCREF(P.obj))
        cdef object hessian = self.get_attr("__hessian__")
        return (J, P, hessian)
595
596    def setJacobian(self, jacobian: TAOJacobianFunction, Mat J=None, Mat P=None,
597                    args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
598        """Set the callback to compute the Jacobian.
599
600        Logically collective.
601
602        Parameters
603        ----------
604        jacobian
605            The Jacobian callback.
606        J
607            The matrix to store the Jacobian.
608        P
609            The matrix to construct the preconditioner.
610        args
611            Positional arguments for the callback.
612        kargs
613            Keyword arguments for the callback.
614
615        See Also
616        --------
617        petsc.TaoSetJacobianRoutine
618
619        """
620        cdef PetscMat Jmat = NULL
621        if J is not None: Jmat = J.mat
622        cdef PetscMat Pmat = Jmat
623        if P is not None: Pmat = P.mat
624        if args is None: args = ()
625        if kargs is None: kargs = {}
626        context = (jacobian, args, kargs)
627        self.set_attr("__jacobian__", context)
628        CHKERR(TaoSetJacobianRoutine(self.tao, Jmat, Pmat, TAO_Jacobian, <void*>context))
629
    def setStateDesignIS(self, IS state=None, IS design=None) -> None:
        """Set the index sets indicating state and design variables.

        Collective.

        Parameters
        ----------
        state
            The index set of the state variables, or `None`.
        design
            The index set of the design variables, or `None`.

        See Also
        --------
        petsc.TaoSetStateDesignIS

        """
        cdef PetscIS s_is = NULL, d_is = NULL
        if state  is not None: s_is = state.iset
        if design is not None: d_is = design.iset
        CHKERR(TaoSetStateDesignIS(self.tao, s_is, d_is))
644
    def setJacobianState(self, jacobian_state, Mat J=None, Mat P=None, Mat I=None,
                         args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set Jacobian state callback.

        Logically collective.

        Parameters
        ----------
        jacobian_state
            The callback computing the Jacobian with respect to the state
            variables.
        J
            The matrix to store the Jacobian.
        P
            The matrix to construct the preconditioner.
        I
            An additional matrix passed to the routine
            (presumably the state Jacobian inverse; confirm against
            petsc.TaoSetJacobianStateRoutine).
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoSetJacobianStateRoutine

        """
        cdef PetscMat Jmat = NULL
        if J is not None: Jmat = J.mat
        # The preconditioning matrix defaults to the Jacobian matrix.
        cdef PetscMat Pmat = Jmat
        if P is not None: Pmat = P.mat
        cdef PetscMat Imat = NULL
        if I is not None: Imat = I.mat
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep the callback context alive on this object while PETSc
        # holds the raw pointer.
        context = (jacobian_state, args, kargs)
        self.set_attr("__jacobian_state__", context)
        CHKERR(TaoSetJacobianStateRoutine(self.tao, Jmat, Pmat, Imat,
                                          TAO_JacobianState, <void*>context))
668
    def setJacobianDesign(self, jacobian_design, Mat J=None,
                          args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set Jacobian design callback.

        Logically collective.

        Parameters
        ----------
        jacobian_design
            The callback computing the Jacobian with respect to the design
            variables.
        J
            The matrix to store the Jacobian.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoSetJacobianDesignRoutine

        """
        cdef PetscMat Jmat = NULL
        if J is not None: Jmat = J.mat
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep the callback context alive on this object while PETSc
        # holds the raw pointer.
        context = (jacobian_design, args, kargs)
        self.set_attr("__jacobian_design__", context)
        CHKERR(TaoSetJacobianDesignRoutine(self.tao, Jmat,
                                           TAO_JacobianDesign, <void*>context))
688
    def setEqualityConstraints(self, equality_constraints, Vec c,
                               args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set equality constraints callback.

        Logically collective.

        Parameters
        ----------
        equality_constraints
            The callback evaluating the equality constraints.
        c
            The vector to store the constraint values.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoSetEqualityConstraintsRoutine

        """
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep the callback context alive on this object while PETSc
        # holds the raw pointer.
        context = (equality_constraints, args, kargs)
        self.set_attr("__equality_constraints__", context)
        CHKERR(TaoSetEqualityConstraintsRoutine(self.tao, c.vec,
                                                TAO_EqualityConstraints, <void*>context))
706
    def setJacobianEquality(self, jacobian_equality, Mat J=None, Mat P=None,
                            args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set Jacobian equality constraints callback.

        Logically collective.

        Parameters
        ----------
        jacobian_equality
            The callback computing the Jacobian of the equality constraints.
        J
            The matrix to store the Jacobian.
        P
            The matrix to construct the preconditioner.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoSetJacobianEqualityRoutine

        """
        cdef PetscMat Jmat = NULL
        if J is not None: Jmat = J.mat
        # The preconditioning matrix defaults to the Jacobian matrix.
        cdef PetscMat Pmat = Jmat
        if P is not None: Pmat = P.mat
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep the callback context alive on this object while PETSc
        # holds the raw pointer.
        context = (jacobian_equality, args, kargs)
        self.set_attr("__jacobian_equality__", context)
        CHKERR(TaoSetJacobianEqualityRoutine(self.tao, Jmat, Pmat,
                                             TAO_JacobianEquality, <void*>context))
728
729    def setUpdate(self, update: TAOUpdateFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
730        """Set the callback to compute update at each optimization step.
731
732        Logically collective.
733
734        Parameters
735        ----------
736        update
737            The update callback or `None` to reset it.
738        args
739            Positional arguments for the callback.
740        kargs
741            Keyword arguments for the callback.
742
743        See Also
744        --------
745        getUpdate, petsc.TaoSetUpdate
746
747        """
748        if update is not None:
749            if args  is None: args  = ()
750            if kargs is None: kargs = {}
751            context = (update, args, kargs)
752            self.set_attr('__update__', context)
753            CHKERR(TaoSetUpdate(self.tao, TAO_Update, <void*>context))
754        else:
755            self.set_attr('__update__', None)
756            CHKERR(TaoSetUpdate(self.tao, NULL, NULL))
757
    def getUpdate(self) -> tuple[TAOUpdateFunction, tuple[Any, ...], dict[str, Any]]:
        """Return the callback to compute the update.

        Not collective.

        See Also
        --------
        setUpdate

        """
        # The (callback, args, kargs) tuple stored by `setUpdate`, if any.
        return self.get_attr('__update__')
769
770    # --------------
771
    def computeObjective(self, Vec x) -> float:
        """Compute the value of the objective function.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.

        See Also
        --------
        setObjective, petsc.TaoComputeObjective

        """
        cdef PetscReal f = 0
        CHKERR(TaoComputeObjective(self.tao, x.vec, &f))
        return toReal(f)
790
    def computeResidual(self, Vec x, Vec f) -> None:
        """Compute the residual.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        f
            The output residual vector.

        See Also
        --------
        setResidual, petsc.TaoComputeResidual

        """
        CHKERR(TaoComputeResidual(self.tao, x.vec, f.vec))
809
    def computeGradient(self, Vec x, Vec g) -> None:
        """Compute the gradient of the objective function.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        g
            The output gradient vector.

        See Also
        --------
        setGradient, petsc.TaoComputeGradient

        """
        CHKERR(TaoComputeGradient(self.tao, x.vec, g.vec))
828
    def computeObjectiveGradient(self, Vec x, Vec g) -> float:
        """Compute the gradient of the objective function and its value.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        g
            The output gradient vector.

        See Also
        --------
        setObjectiveGradient, setGradient, setObjective
        petsc.TaoComputeObjectiveAndGradient

        """
        cdef PetscReal f = 0
        # Single callback evaluating both; the objective value is returned,
        # the gradient is written into ``g``.
        CHKERR(TaoComputeObjectiveAndGradient(self.tao, x.vec, &f, g.vec))
        return toReal(f)
850
    def computeDualVariables(self, Vec xl, Vec xu) -> None:
        """Compute the dual vectors corresponding to variables' bounds.

        Collective.

        Parameters
        ----------
        xl
            The vector to store the dual associated with the lower bounds.
        xu
            The vector to store the dual associated with the upper bounds.

        See Also
        --------
        petsc.TaoComputeDualVariables

        """
        CHKERR(TaoComputeDualVariables(self.tao, xl.vec, xu.vec))
862
    def computeVariableBounds(self, Vec xl, Vec xu) -> None:
        """Compute the vectors corresponding to variables' bounds.

        Collective.

        Parameters
        ----------
        xl
            The vector to store the lower bounds.
        xu
            The vector to store the upper bounds.

        See Also
        --------
        setVariableBounds, petsc.TaoComputeVariableBounds

        """
        CHKERR(TaoComputeVariableBounds(self.tao))
        cdef PetscVec Lvec = NULL, Uvec = NULL
        CHKERR(TaoGetVariableBounds(self.tao, &Lvec, &Uvec))
        # Copy the internally computed bounds into the user-provided
        # vectors; a NULL internal vector means the corresponding side
        # is unbounded, so fill with -infinity/+infinity instead.
        if xl.vec != NULL:
            if Lvec != NULL:
                CHKERR(VecCopy(Lvec, xl.vec))
            else:
                CHKERR(VecSet(xl.vec, <PetscScalar>PETSC_NINFINITY))
        if xu.vec != NULL:
            if Uvec != NULL:
                CHKERR(VecCopy(Uvec, xu.vec))
            else:
                CHKERR(VecSet(xu.vec, <PetscScalar>PETSC_INFINITY))
886
    def computeConstraints(self, Vec x, Vec c) -> None:
        """Compute the vector corresponding to the constraints.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        c
            The output constraints vector.

        See Also
        --------
        setConstraints, petsc.TaoComputeConstraints

        """
        CHKERR(TaoComputeConstraints(self.tao, x.vec, c.vec))
905
    def computeHessian(self, Vec x, Mat H, Mat P=None) -> None:
        """Compute the Hessian of the objective function.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        H
            The output Hessian matrix.
        P
            The output Hessian matrix used to construct the preconditioner.

        See Also
        --------
        setHessian, petsc.TaoComputeHessian

        """
        # The preconditioning matrix defaults to the Hessian matrix.
        cdef PetscMat hmat = H.mat, pmat = H.mat
        if P is not None: pmat = P.mat
        CHKERR(TaoComputeHessian(self.tao, x.vec, hmat, pmat))
928
    def computeJacobian(self, Vec x, Mat J, Mat P=None) -> None:
        """Compute the Jacobian.

        Collective.

        Parameters
        ----------
        x
            The parameter vector.
        J
            The output Jacobian matrix.
        P
            The output Jacobian matrix used to construct the preconditioner.

        See Also
        --------
        setJacobian, petsc.TaoComputeJacobian

        """
        # The preconditioning matrix defaults to the Jacobian matrix.
        cdef PetscMat jmat = J.mat, pmat = J.mat
        if P is not None: pmat = P.mat
        CHKERR(TaoComputeJacobian(self.tao, x.vec, jmat, pmat))
951
952    # --------------
953
    def setTolerances(self, gatol: float | None = None, grtol: float | None = None, gttol: float | None = None) -> None:
        """Set the tolerance parameters used in the solver convergence tests.

        Collective.

        Parameters
        ----------
        gatol
            The absolute norm of the gradient. Defaults to `DEFAULT`.
        grtol
            The relative norm of the gradient with respect
            to the initial norm of the objective. Defaults to `DEFAULT`.
        gttol
            The relative norm of the gradient with respect
            to the initial norm of the gradient. Defaults to `DEFAULT`.

        See Also
        --------
        getTolerances, petsc.TaoSetTolerances

        """
        # None leaves the corresponding tolerance at PETSC_DEFAULT.
        cdef PetscReal _gatol=PETSC_DEFAULT, _grtol=PETSC_DEFAULT, _gttol=PETSC_DEFAULT
        if gatol is not None: _gatol = asReal(gatol)
        if grtol is not None: _grtol = asReal(grtol)
        if gttol is not None: _gttol = asReal(gttol)
        CHKERR(TaoSetTolerances(self.tao, _gatol, _grtol, _gttol))
980
981    def getTolerances(self) -> tuple[float, float, float]:
982        """Return the tolerance parameters used in the solver convergence tests.
983
984        Not collective.
985
986        Returns
987        -------
988        gatol : float
989            The absolute norm of the gradient.
990        grtol : float
991            The relative norm of the gradient with respect to
992            the initial norm of the objective.
993        gttol : float
994            The relative norm of the gradient with respect to
995            the initial norm of the gradient.
996
997        See Also
998        --------
999        setTolerances, petsc.TaoGetTolerances
1000
1001        """
1002        cdef PetscReal _gatol=PETSC_DEFAULT, _grtol=PETSC_DEFAULT, _gttol=PETSC_DEFAULT
1003        CHKERR(TaoGetTolerances(self.tao, &_gatol, &_grtol, &_gttol))
1004        return (toReal(_gatol), toReal(_grtol), toReal(_gttol))
1005
1006    def setMaximumIterations(self, mit: int) -> float:
1007        """Set the maximum number of solver iterations.
1008
1009        Collective.
1010
1011        See Also
1012        --------
1013        setTolerances, petsc.TaoSetMaximumIterations
1014
1015        """
1016        cdef PetscInt _mit = asInt(mit)
1017        CHKERR(TaoSetMaximumIterations(self.tao, _mit))
1018
1019    def getMaximumIterations(self) -> int:
1020        """Return the maximum number of solver iterations.
1021
1022        Not collective.
1023
1024        See Also
1025        --------
1026        setMaximumIterations, petsc.TaoGetMaximumIterations
1027
1028        """
1029        cdef PetscInt _mit = PETSC_DEFAULT
1030        CHKERR(TaoGetMaximumIterations(self.tao, &_mit))
1031        return toInt(_mit)
1032
1033    def setMaximumFunctionEvaluations(self, mit: int) -> None:
1034        """Set the maximum number of objective evaluations within the solver.
1035
1036        Collective.
1037
1038        See Also
1039        --------
1040        setMaximumIterations, petsc.TaoSetMaximumFunctionEvaluations
1041
1042        """
1043        cdef PetscInt _mit = asInt(mit)
1044        CHKERR(TaoSetMaximumFunctionEvaluations(self.tao, _mit))
1045
1046    def getMaximumFunctionEvaluations(self) -> int:
1047        """Return the maximum number of objective evaluations within the solver.
1048
1049        Not collective.
1050
1051        See Also
1052        --------
1053        setMaximumFunctionEvaluations, petsc.TaoGetMaximumFunctionEvaluations
1054
1055        """
1056        cdef PetscInt _mit = PETSC_DEFAULT
1057        CHKERR(TaoGetMaximumFunctionEvaluations(self.tao, &_mit))
1058        return toInt(_mit)
1059
1060    def setConstraintTolerances(self, catol: float = None, crtol: float = None) -> None:
1061        """Set the constraints tolerance parameters used in the solver convergence tests.
1062
1063        Collective.
1064
1065        Parameters
1066        ----------
1067        catol
1068            The absolute norm of the constraints. Defaults to `DEFAULT`.
1069        crtol
1070            The relative norm of the constraints. Defaults to `DEFAULT`.
1071
1072        See Also
1073        --------
1074        getConstraintTolerances, petsc.TaoSetConstraintTolerances
1075
1076        """
1077        cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT
1078        if catol is not None: _catol = asReal(catol)
1079        if crtol is not None: _crtol = asReal(crtol)
1080        CHKERR(TaoSetConstraintTolerances(self.tao, _catol, _crtol))
1081
1082    def getConstraintTolerances(self) -> tuple[float, float]:
1083        """Return the constraints tolerance parameters used in the convergence tests.
1084
1085        Not collective.
1086
1087        Returns
1088        -------
1089        catol : float
1090            The absolute norm of the constraints.
1091        crtol : float
1092            The relative norm of the constraints.
1093
1094        See Also
1095        --------
1096        setConstraintTolerances, petsc.TaoGetConstraintTolerances
1097
1098        """
1099        cdef PetscReal _catol=PETSC_DEFAULT, _crtol=PETSC_DEFAULT
1100        CHKERR(TaoGetConstraintTolerances(self.tao, &_catol, &_crtol))
1101        return (toReal(_catol), toReal(_crtol))
1102
    def setConvergenceTest(self, converged: TAOConvergedFunction | None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback used to test for solver convergence.

        Logically collective.

        Parameters
        ----------
        converged
            The callback. If `None`, reset to the default convergence test.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        getConvergenceTest, petsc.TaoSetConvergenceTest

        """
        if converged is None:
            # Restore the built-in convergence test and drop any stored callback.
            CHKERR(TaoSetConvergenceTest(self.tao, TaoDefaultConvergenceTest, NULL))
            self.set_attr('__converged__', None)
        else:
            if args is None: args = ()
            if kargs is None: kargs = {}
            # Stash the Python callback on the object; the C-level trampoline
            # TAO_Converged retrieves it from '__converged__' at call time.
            self.set_attr('__converged__', (converged, args, kargs))
            CHKERR(TaoSetConvergenceTest(self.tao, TAO_Converged, NULL))
1130
    def getConvergenceTest(self) -> tuple[TAOConvergedFunction, tuple[Any, ...], dict[str, Any]]:
        """Return the callback used to test for solver convergence.

        Not collective.

        See Also
        --------
        setConvergenceTest

        """
        # Returns the (callback, args, kargs) tuple stored by setConvergenceTest,
        # or None if no user convergence test is installed.
        return self.get_attr('__converged__')
1142
1143    def setConvergedReason(self, reason: ConvergedReason) -> None:
1144        """Set the termination flag.
1145
1146        Collective.
1147
1148        See Also
1149        --------
1150        getConvergedReason, petsc.TaoSetConvergedReason
1151
1152        """
1153        cdef PetscTAOConvergedReason creason = reason
1154        CHKERR(TaoSetConvergedReason(self.tao, creason))
1155
    def getConvergedReason(self) -> ConvergedReason:
        """Return the termination flag.

        Not collective.

        See Also
        --------
        setConvergedReason, petsc.TaoGetConvergedReason

        """
        # NOTE(review): a second definition of getConvergedReason appears later
        # in this class and shadows this one; consider removing one of them.
        cdef PetscTAOConvergedReason creason = TAO_CONTINUE_ITERATING
        CHKERR(TaoGetConvergedReason(self.tao, &creason))
        return creason
1169
    def setMonitor(self, monitor: TAOMonitorFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback used to monitor solver convergence.

        Logically collective.

        Parameters
        ----------
        monitor
            The callback.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        getMonitor, petsc.TaoMonitorSet

        """
        if monitor is None: return
        cdef object monitorlist = self.get_attr('__monitor__')
        if args  is None: args  = ()
        if kargs is None: kargs = {}
        if monitorlist is None:
            # First monitor: register the C-level trampoline once and start
            # the list of Python callbacks it dispatches to.
            CHKERR(TaoMonitorSet(self.tao, TAO_Monitor, NULL, NULL))
            self.set_attr('__monitor__',  [(monitor, args, kargs)])
        else:
            # Subsequent monitors only need to be appended to the list.
            monitorlist.append((monitor, args, kargs))
1198
    def getMonitor(self) -> list[tuple[TAOMonitorFunction, tuple[Any, ...], dict[str, Any]]]:
        """Return the callback used to monitor solver convergence.

        Not collective.

        See Also
        --------
        setMonitor

        """
        # List of (callback, args, kargs) tuples installed by setMonitor,
        # or None if no monitor is set.
        return self.get_attr('__monitor__')
1210
    def cancelMonitor(self) -> None:
        """Cancel all the monitors of the solver.

        Logically collective.

        See Also
        --------
        setMonitor, petsc.TaoMonitorCancel

        """
        CHKERR(TaoMonitorCancel(self.tao))
        # Also drop the Python-side callback list kept by setMonitor.
        self.set_attr('__monitor__',  None)
1223
    # Tao overwrites these statistics. Copy user defined only if present
    def monitor(self, its: int = None, f: float = None, res: float = None, cnorm: float = None, step: float = None) -> None:
        """Monitor the solver.

        Collective.

        This function should be called without arguments,
        unless users want to modify the values internally stored by the solver.

        Parameters
        ----------
        its
            Current number of iterations
            or `None` to use the value stored internally by the solver.
        f
            Current value of the objective function
            or `None` to use the value stored internally by the solver.
        res
            Current value of the residual norm
            or `None` to use the value stored internally by the solver.
        cnorm
            Current value of the constrains norm
            or `None` to use the value stored internally by the solver.
        step
            Current value of the step
            or `None` to use the value stored internally by the solver.

        See Also
        --------
        setMonitor, petsc.TaoMonitor

        """
        cdef PetscInt cits = 0
        cdef PetscReal cf = 0.0
        cdef PetscReal cres = 0.0
        cdef PetscReal ccnorm = 0.0
        cdef PetscReal cstep = 0.0
        # Start from the values currently stored by the solver, then
        # selectively override with any user-supplied arguments.
        CHKERR(TaoGetSolutionStatus(self.tao, &cits, &cf, &cres, &ccnorm, &cstep, NULL))
        if its is not None:
            cits = asInt(its)
        if f is not None:
            cf = asReal(f)
        if res is not None:
            cres = asReal(res)
        if cnorm is not None:
            ccnorm = asReal(cnorm)
        if step is not None:
            cstep = asReal(step)
        CHKERR(TaoMonitor(self.tao, cits, cf, cres, ccnorm, cstep))
1273
1274    #
1275
    def solve(self, Vec x=None) -> None:
        """Solve the optimization problem.

        Collective.

        Parameters
        ----------
        x
            The starting vector or `None` to use the vector stored internally.

        See Also
        --------
        setSolution, getSolution, petsc.TaoSolve

        """
        if x is not None:
            # Install the user-provided starting point before solving.
            CHKERR(TaoSetSolution(self.tao, x.vec))
        CHKERR(TaoSolve(self.tao))
1294
1295    def getSolution(self) -> Vec:
1296        """Return the vector holding the solution.
1297
1298        Not collective.
1299
1300        See Also
1301        --------
1302        setSolution, petsc.TaoGetSolution
1303
1304        """
1305        cdef Vec vec = Vec()
1306        CHKERR(TaoGetSolution(self.tao, &vec.vec))
1307        CHKERR(PetscINCREF(vec.obj))
1308        return vec
1309
    def setGradientNorm(self, Mat mat) -> None:
        """Set the matrix used to compute inner products.

        Collective.

        Parameters
        ----------
        mat
            The matrix defining the inner product.

        See Also
        --------
        getGradientNorm, petsc.TaoSetGradientNorm

        """
        CHKERR(TaoSetGradientNorm(self.tao, mat.mat))
1321
1322    def getGradientNorm(self) -> Mat:
1323        """Return the matrix used to compute inner products.
1324
1325        Not collective.
1326
1327        See Also
1328        --------
1329        setGradientNorm, petsc.TaoGetGradientNorm
1330
1331        """
1332        cdef Mat mat = Mat()
1333        CHKERR(TaoGetGradientNorm(self.tao, &mat.mat))
1334        CHKERR(PetscINCREF(mat.obj))
1335        return mat
1336
    def setLMVMH0(self, Mat mat) -> None:
        """Set the initial Hessian for the quasi-Newton approximation.

        Collective.

        Parameters
        ----------
        mat
            The initial Hessian matrix.

        See Also
        --------
        getLMVMH0, petsc.TaoLMVMSetH0

        """
        CHKERR(TaoLMVMSetH0(self.tao, mat.mat))
1348
1349    def getLMVMH0(self) -> Mat:
1350        """Return the initial Hessian for the quasi-Newton approximation.
1351
1352        Not collective.
1353
1354        See Also
1355        --------
1356        setLMVMH0, petsc.TaoLMVMGetH0
1357
1358        """
1359        cdef Mat mat = Mat()
1360        CHKERR(TaoLMVMGetH0(self.tao, &mat.mat))
1361        CHKERR(PetscINCREF(mat.obj))
1362        return mat
1363
1364    def getLMVMH0KSP(self) -> KSP:
1365        """Return the `KSP` for the inverse of the initial Hessian approximation.
1366
1367        Not collective.
1368
1369        See Also
1370        --------
1371        setLMVMH0, petsc.TaoLMVMGetH0KSP
1372
1373        """
1374        cdef KSP ksp = KSP()
1375        CHKERR(TaoLMVMGetH0KSP(self.tao, &ksp.ksp))
1376        CHKERR(PetscINCREF(ksp.obj))
1377        return ksp
1378
1379    def getVariableBounds(self) -> tuple[Vec, Vec]:
1380        """Return the upper and lower bounds vectors.
1381
1382        Not collective.
1383
1384        See Also
1385        --------
1386        setVariableBounds, petsc.TaoGetVariableBounds
1387
1388        """
1389        cdef Vec xl = Vec(), xu = Vec()
1390        CHKERR(TaoGetVariableBounds(self.tao, &xl.vec, &xu.vec))
1391        CHKERR(PetscINCREF(xl.obj)); CHKERR(PetscINCREF(xu.obj))
1392        return (xl, xu)
1393
1394    def setBNCGType(self, cg_type: BNCGType) -> None:
1395        """Set the type of the BNCG solver.
1396
1397        Collective.
1398
1399        See Also
1400        --------
1401        getBNCGType, petsc.TaoBNCGSetType
1402
1403        """
1404        cdef PetscTAOBNCGType ctype = cg_type
1405        CHKERR(TaoBNCGSetType(self.tao, ctype))
1406
1407    def getBNCGType(self) -> BNCGType:
1408        """Return the type of the BNCG solver.
1409
1410        Not collective.
1411
1412        See Also
1413        --------
1414        setBNCGType, petsc.TaoBNCGGetType
1415
1416        """
1417        cdef PetscTAOBNCGType cg_type = TAO_BNCG_SSML_BFGS
1418        CHKERR(TaoBNCGGetType(self.tao, &cg_type))
1419        return cg_type
1420
1421    def setIterationNumber(self, its: int) -> None:
1422        """Set the current iteration number.
1423
1424        Collective.
1425
1426        See Also
1427        --------
1428        getIterationNumber, petsc.TaoSetIterationNumber
1429
1430        """
1431        cdef PetscInt ival = asInt(its)
1432        CHKERR(TaoSetIterationNumber(self.tao, ival))
1433
1434    def getIterationNumber(self) -> int:
1435        """Return the current iteration number.
1436
1437        Not collective.
1438
1439        See Also
1440        --------
1441        setIterationNumber, petsc.TaoGetIterationNumber
1442
1443        """
1444        cdef PetscInt its=0
1445        CHKERR(TaoGetIterationNumber(self.tao, &its))
1446        return toInt(its)
1447
1448    def getObjectiveValue(self) -> float:
1449        """Return the current value of the objective function.
1450
1451        Not collective.
1452
1453        See Also
1454        --------
1455        setObjective, petsc.TaoGetSolutionStatus
1456
1457        """
1458        cdef PetscReal fval=0
1459        CHKERR(TaoGetSolutionStatus(self.tao, NULL, &fval, NULL, NULL, NULL, NULL))
1460        return toReal(fval)
1461
    getFunctionValue = getObjectiveValue  # alias kept for backward compatibility
1463
    def getConvergedReason(self) -> ConvergedReason:
        """Return the reason for the solver convergence.

        Not collective.

        See Also
        --------
        petsc.TaoGetConvergedReason

        """
        # NOTE(review): this duplicates an earlier getConvergedReason definition
        # in this class; being later in the class body, this one takes effect.
        cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING
        CHKERR(TaoGetConvergedReason(self.tao, &reason))
        return reason
1477
1478    def getSolutionNorm(self) -> tuple[float, float, float]:
1479        """Return the objective function value and the norms of gradient and constraints.
1480
1481        Not collective.
1482
1483        Returns
1484        -------
1485        f : float
1486            Current value of the objective function.
1487        res : float
1488            Current value of the residual norm.
1489        cnorm : float
1490            Current value of the constrains norm.
1491
1492        See Also
1493        --------
1494        getSolutionStatus, petsc.TaoGetSolutionStatus
1495
1496        """
1497        cdef PetscReal gnorm=0
1498        cdef PetscReal cnorm=0
1499        cdef PetscReal fval=0
1500        CHKERR(TaoGetSolutionStatus(self.tao, NULL, &fval, &gnorm, &cnorm, NULL, NULL))
1501        return (toReal(fval), toReal(gnorm), toReal(cnorm))
1502
1503    def getSolutionStatus(self) -> tuple[int, float, float, float, float, ConvergedReason]:
1504        """Return the solution status.
1505
1506        Not collective.
1507
1508        Returns
1509        -------
1510        its : int
1511            Current number of iterations.
1512        f : float
1513            Current value of the objective function.
1514        res : float
1515            Current value of the residual norm.
1516        cnorm : float
1517            Current value of the constrains norm.
1518        step : float
1519            Current value of the step.
1520        reason : ConvergedReason
1521            Current value of converged reason.
1522
1523        See Also
1524        --------
1525        petsc.TaoGetSolutionStatus
1526
1527        """
1528        cdef PetscInt its=0
1529        cdef PetscReal fval=0, gnorm=0, cnorm=0, xdiff=0
1530        cdef PetscTAOConvergedReason reason = TAO_CONTINUE_ITERATING
1531        CHKERR(TaoGetSolutionStatus(self.tao, &its,
1532                                    &fval, &gnorm, &cnorm, &xdiff,
1533                                    &reason))
1534        return (toInt(its), toReal(fval),
1535                toReal(gnorm), toReal(cnorm),
1536                toReal(xdiff), reason)
1537
1538    def getKSP(self) -> KSP:
1539        """Return the linear solver used by the nonlinear solver.
1540
1541        Not collective.
1542
1543        See Also
1544        --------
1545        petsc.TaoGetKSP
1546
1547        """
1548        cdef KSP ksp = KSP()
1549        CHKERR(TaoGetKSP(self.tao, &ksp.ksp))
1550        CHKERR(PetscINCREF(ksp.obj))
1551        return ksp
1552
1553    # BRGN routines
1554
1555    def getBRGNSubsolver(self) -> TAO:
1556        """Return the subsolver inside the BRGN solver.
1557
1558        Not collective.
1559
1560        See Also
1561        --------
1562        petsc.TaoBRGNGetSubsolver
1563
1564        """
1565        cdef TAO subsolver = TAO()
1566        CHKERR(TaoBRGNGetSubsolver(self.tao, &subsolver.tao))
1567        CHKERR(PetscINCREF(subsolver.obj))
1568        return subsolver
1569
    def setBRGNRegularizerObjectiveGradient(self, objgrad, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback to compute the regularizer objective and gradient.

        Logically collective.

        Parameters
        ----------
        objgrad
            The callback.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoBRGNSetRegularizerObjectiveAndGradientRoutine

        """
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep a reference on the object so the context outlives this call.
        context = (objgrad, args, kargs)
        self.set_attr("__brgnregobjgrad__", context)
        CHKERR(TaoBRGNSetRegularizerObjectiveAndGradientRoutine(self.tao, TAO_BRGNRegObjGrad, <void*>context))
1585
    def setBRGNRegularizerHessian(self, hessian, Mat H=None, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
        """Set the callback to compute the regularizer Hessian.

        Logically collective.

        Parameters
        ----------
        hessian
            The callback.
        H
            The matrix to store the Hessian.
        args
            Positional arguments for the callback.
        kargs
            Keyword arguments for the callback.

        See Also
        --------
        petsc.TaoBRGNSetRegularizerHessianRoutine

        """
        cdef PetscMat Hmat = NULL
        if H is not None: Hmat = H.mat
        if args is None: args = ()
        if kargs is None: kargs = {}
        # Keep a reference on the object so the context outlives this call.
        context = (hessian, args, kargs)
        self.set_attr("__brgnreghessian__", context)
        CHKERR(TaoBRGNSetRegularizerHessianRoutine(self.tao, Hmat, TAO_BRGNRegHessian, <void*>context))
1603
    def setBRGNRegularizerWeight(self, weight: float) -> None:
        """Set the regularizer weight.

        Collective.

        See Also
        --------
        petsc.TaoBRGNSetRegularizerWeight

        """
        cdef PetscReal cweight = asReal(weight)
        CHKERR(TaoBRGNSetRegularizerWeight(self.tao, cweight))
1612
1613    def setBRGNSmoothL1Epsilon(self, epsilon: float) -> None:
1614        """Set the smooth L1 epsilon.
1615
1616        Collective.
1617
1618        See Also
1619        --------
1620        petsc.TaoBRGNSetL1SmoothEpsilon
1621
1622        """
1623        cdef PetscReal ceps = asReal(epsilon)
1624        CHKERR(TaoBRGNSetL1SmoothEpsilon(self.tao, ceps))
1625
    def setBRGNDictionaryMatrix(self, Mat D) -> None:
        """Set the dictionary matrix.

        Collective.

        Parameters
        ----------
        D
            The dictionary matrix.

        See Also
        --------
        petsc.TaoBRGNSetDictionaryMatrix

        """
        CHKERR(TaoBRGNSetDictionaryMatrix(self.tao, D.mat))
1637
    def getBRGNDampingVector(self) -> Vec:
        """Return the damping vector.

        Not collective.

        """
        # FIXME
        # See Also
        # --------
        # petsc.TaoBRGNGetDampingVector
        cdef Vec damp = Vec()
        CHKERR(TaoBRGNGetDampingVector(self.tao, &damp.vec))
        # Borrowed reference: bump the refcount before handing it out.
        CHKERR(PetscINCREF(damp.obj))
        return damp
1652
    def createPython(self, context: Any = None, comm: Comm | None = None) -> Self:
        """Create an optimization solver of Python type.

        Collective.

        Parameters
        ----------
        context
            An instance of the Python class implementing the required methods.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc_python_tao, setType, setPythonContext, Type.PYTHON

        """
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscTAO tao = NULL
        CHKERR(TaoCreate(ccomm, &tao))
        # Release any previously held solver before taking ownership.
        CHKERR(PetscCLEAR(self.obj)); self.tao = tao
        CHKERR(TaoSetType(self.tao, TAOPYTHON))
        CHKERR(TaoPythonSetContext(self.tao, <void*>context))
        return self
1677
    def setPythonContext(self, context: Any) -> None:
        """Set the instance of the class implementing the required Python methods.

        Not collective.

        See Also
        --------
        petsc_python_tao, getPythonContext, petsc.TaoPythonSetContext

        """
        CHKERR(TaoPythonSetContext(self.tao, <void*>context))
1689
1690    def getPythonContext(self) -> Any:
1691        """Return the instance of the class implementing the required Python methods.
1692
1693        Not collective.
1694
1695        See Also
1696        --------
1697        petsc_python_tao, setPythonContext
1698
1699        """
1700        cdef void *context = NULL
1701        CHKERR(TaoPythonGetContext(self.tao, &context))
1702        if context == NULL: return None
1703        else: return <object> context
1704
1705    def setPythonType(self, py_type: str) -> None:
1706        """Set the fully qualified Python name of the class to be used.
1707
1708        Collective.
1709
1710        See Also
1711        --------
1712        petsc_python_tao, setPythonContext, getPythonType
1713        petsc.TaoPythonSetType
1714
1715        """
1716        cdef const char *cval = NULL
1717        py_type = str2bytes(py_type, &cval)
1718        CHKERR(TaoPythonSetType(self.tao, cval))
1719
1720    def getPythonType(self) -> str:
1721        """Return the fully qualified Python name of the class used by the solver.
1722
1723        Not collective.
1724
1725        See Also
1726        --------
1727        petsc_python_tao, setPythonContext, setPythonType
1728        petsc.TaoPythonGetType
1729
1730        """
1731        cdef const char *cval = NULL
1732        CHKERR(TaoPythonGetType(self.tao, &cval))
1733        return bytes2str(cval)
1734
    def getLineSearch(self) -> TAOLineSearch:
        """Return the TAO Line Search object.

        Not collective.

        See Also
        --------
        petsc.TaoGetLineSearch

        """
        cdef TAOLineSearch ls = TAOLineSearch()
        CHKERR(TaoGetLineSearch(self.tao, &ls.taols))
        # Borrowed reference: bump the refcount before handing it out.
        CHKERR(PetscINCREF(ls.obj))
        return ls
1749
    # --- backward compatibility ---

    setInitial = setSolution  # deprecated alias for setSolution
1753
1754    # --- application context ---
1755
    property appctx:
        """Application context (see `getAppCtx`/`setAppCtx`)."""
        def __get__(self) -> Any:
            return self.getAppCtx()

        def __set__(self, value: Any):
            self.setAppCtx(value)
1763
1764    # --- linear solver ---
1765
    property ksp:
        """Linear solver (read-only, see `getKSP`)."""
        def __get__(self) -> KSP:
            return self.getKSP()
1770
1771    # --- tolerances ---
1772
    # FIXME: tolerances all broken
    property ftol:
        """Function tolerances (broken).

        NOTE(review): ``getFunctionTolerances``/``setFunctionTolerances`` do
        not appear to be defined on this class, so accessing this property
        raises `AttributeError`. Kept for backward compatibility — confirm
        the intended accessors before relying on it.
        """
        def __get__(self) -> Any:
            return self.getFunctionTolerances()

        def __set__(self, value):
            if isinstance(value, (tuple, list)):
                self.setFunctionTolerances(*value)
            elif isinstance(value, dict):
                self.setFunctionTolerances(**value)
            else:
                raise TypeError("expecting tuple/list or dict")
1786
1787    property gtol:
1788        """Broken."""
1789        def __get__(self) -> Any:
1790            return self.getGradientTolerances()
1791
1792        def __set__(self, value):
1793            if isinstance(value, (tuple, list)):
1794                self.getGradientTolerances(*value)
1795            elif isinstance(value, dict):
1796                self.getGradientTolerances(**value)
1797            else:
1798                raise TypeError("expecting tuple/list or dict")
1799
1800    property ctol:
1801        """Broken."""
1802        def __get__(self) -> Any:
1803            return self.getConstraintTolerances()
1804
1805        def __set__(self, value):
1806            if isinstance(value, (tuple, list)):
1807                self.getConstraintTolerances(*value)
1808            elif isinstance(value, dict):
1809                self.getConstraintTolerances(**value)
1810            else:
1811                raise TypeError("expecting tuple/list or dict")
1812
1813    # --- iteration ---
1814
    property its:
        """Number of iterations (read-only, see `getIterationNumber`)."""
        def __get__(self) -> int:
            return self.getIterationNumber()
1819
    property gnorm:
        """Gradient norm (read-only, see `getSolutionNorm`)."""
        def __get__(self) -> float:
            return self.getSolutionNorm()[1]
1824
    property cnorm:
        """Constraints norm (read-only, see `getSolutionNorm`)."""
        def __get__(self) -> float:
            return self.getSolutionNorm()[2]
1829
    property solution:
        """Solution vector (read-only, see `getSolution`)."""
        def __get__(self) -> Vec:
            return self.getSolution()
1834
    property objective:
        """Objective value (read-only, see `getObjectiveValue`)."""
        def __get__(self) -> float:
            return self.getObjectiveValue()
1839
    property function:
        """Objective value (read-only alias of `objective`)."""
        def __get__(self) -> float:
            return self.getFunctionValue()
1844
    property gradient:
        """Gradient vector (read-only, see `getGradient`)."""
        def __get__(self) -> Vec:
            return self.getGradient()[0]
1849
1850    # --- convergence ---
1851
    property reason:
        """Converged reason (read-only, see `getConvergedReason`)."""
        def __get__(self) -> ConvergedReason:
            return self.getConvergedReason()
1856
    property iterating:
        """Boolean indicating if the solver has not converged yet."""
        def __get__(self) -> bool:
            # CONTINUE_ITERATING is 0; positive means converged, negative diverged.
            return self.reason == 0
1861
    property converged:
        """Boolean indicating if the solver has converged."""
        def __get__(self) -> bool:
            # Converged reasons are positive enum values.
            return self.reason > 0
1866
    property diverged:
        """Boolean indicating if the solver has failed."""
        def __get__(self) -> bool:
            # Divergence/failure reasons are negative enum values.
            return self.reason < 0
1871
1872# --------------------------------------------------------------------
1873
# Remove the helper classes from the module namespace; they remain
# reachable as attributes of the TAO class (Type, ConvergedReason, BNCGType).
del TAOType
del TAOConvergedReason
del TAOBNCGType
1877
1878# --------------------------------------------------------------------
1879
1880
class TAOLineSearchType:
    """TAO line search type."""
    UNIT        = S_(TAOLINESEARCHUNIT)
    ARMIJO      = S_(TAOLINESEARCHARMIJO)
    MORETHUENTE = S_(TAOLINESEARCHMT)
    IPM         = S_(TAOLINESEARCHIPM)
    OWARMIJO    = S_(TAOLINESEARCHOWARMIJO)
    GPCG        = S_(TAOLINESEARCHGPCG)
1888    GPCG        = S_(TAOLINESEARCHGPCG)
1889
1890
class TAOLineSearchConvergedReason:
    """TAO line search termination reason."""
    # iterating
    CONTINUE_SEARCH       = TAOLINESEARCH_CONTINUE_ITERATING
    # failed
    FAILED_INFORNAN       = TAOLINESEARCH_FAILED_INFORNAN      # inf or NaN in user function
    FAILED_BADPARAMETER   = TAOLINESEARCH_FAILED_BADPARAMETER  # negative value set as parameter
    FAILED_ASCENT         = TAOLINESEARCH_FAILED_ASCENT        # search direction is not a descent direction
    # succeeded
    SUCCESS               = TAOLINESEARCH_SUCCESS              # found step length
    SUCCESS_USER          = TAOLINESEARCH_SUCCESS_USER         # user-defined success criteria reached
    # halted
    HALTED_OTHER          = TAOLINESEARCH_HALTED_OTHER         # stopped search with unknown reason
    HALTED_MAXFCN         = TAOLINESEARCH_HALTED_MAXFCN        # maximum function evaluations reached
    HALTED_UPPERBOUND     = TAOLINESEARCH_HALTED_UPPERBOUND    # stopped at upper bound
    HALTED_LOWERBOUND     = TAOLINESEARCH_HALTED_LOWERBOUND    # stopped at lower bound
    HALTED_RTOL           = TAOLINESEARCH_HALTED_RTOL          # range of uncertainty is below tolerance
    HALTED_USER           = TAOLINESEARCH_HALTED_USER          # user-defined halt criteria reached
1909
1910# --------------------------------------------------------------------
1911
1912
1913cdef class TAOLineSearch(Object):
1914    """TAO Line Search."""
1915
1916    Type   = TAOLineSearchType
1917    Reason = TAOLineSearchConvergedReason
1918
    def __cinit__(self):
        # Point the generic PETSc object handle at the line-search pointer
        # and start with an empty (NULL) native object.
        self.obj = <PetscObject*> &self.taols
        self.taols = NULL
1922
1923    def view(self, Viewer viewer=None) -> None:
1924        """View the linesearch object.
1925
1926        Collective.
1927
1928        Parameters
1929        ----------
1930        viewer
1931            A `Viewer` instance or `None` for the default viewer.
1932
1933        See Also
1934        --------
1935        petsc.TaoLineSearchView
1936
1937        """
1938        cdef PetscViewer vwr = NULL
1939        if viewer is not None: vwr = viewer.vwr
1940        CHKERR(TaoLineSearchView(self.taols, vwr))
1941
    def destroy(self) -> Self:
        """Destroy the linesearch object.

        Collective.

        See Also
        --------
        petsc.TaoLineSearchDestroy

        """
        # Destroys the underlying PETSc object referenced by self.taols.
        CHKERR(TaoLineSearchDestroy(&self.taols))
        return self
1954
1955    def create(self, comm=None) -> Self:
1956        """Create a TAO linesearch.
1957
1958        Collective.
1959
1960        Parameters
1961        ----------
1962        comm
1963            MPI communicator, defaults to `Sys.getDefaultComm`.
1964
1965        See Also
1966        --------
1967        Sys.getDefaultComm, petsc.TaoLineSearchCreate
1968
1969        """
1970        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
1971        cdef PetscTAOLineSearch newtaols = NULL
1972        CHKERR(TaoLineSearchCreate(ccomm, &newtaols))
1973        CHKERR(PetscCLEAR(self.obj)); self.taols = newtaols
1974        return self
1975
1976    def setType(self, ls_type: Type | str) -> None:
1977        """Set the type of the linesearch.
1978
1979        Logically collective.
1980
1981        Parameters
1982        ----------
1983        ls_type
1984            The type of the solver.
1985
1986        See Also
1987        --------
1988        getType, petsc.TaoLineSearchSetType
1989
1990        """
1991        cdef PetscTAOLineSearchType ctype = NULL
1992        ls_type = str2bytes(ls_type, &ctype)
1993        CHKERR(TaoLineSearchSetType(self.taols, ctype))
1994
1995    def getType(self) -> str:
1996        """Return the type of the linesearch.
1997
1998        Not collective.
1999
2000        See Also
2001        --------
2002        setType, petsc.TaoLineSearchGetType
2003
2004        """
2005        cdef PetscTAOLineSearchType ctype = NULL
2006        CHKERR(TaoLineSearchGetType(self.taols, &ctype))
2007        return bytes2str(ctype)
2008
2009    def setFromOptions(self) -> None:
2010        """Configure the linesearch from the options database.
2011
2012        Collective.
2013
2014        See Also
2015        --------
2016        petsc_options, petsc.TaoLineSearchSetFromOptions
2017
2018        """
2019        CHKERR(TaoLineSearchSetFromOptions(self.taols))
2020
2021    def setUp(self) -> None:
2022        """Set up the internal data structures for using the linesearch.
2023
2024        Collective.
2025
2026        See Also
2027        --------
2028        petsc.TaoLineSearchSetUp
2029
2030        """
2031        CHKERR(TaoLineSearchSetUp(self.taols))
2032
2033    def setOptionsPrefix(self, prefix: str | None = None) -> None:
2034        """Set the prefix used for searching for options in the database.
2035
2036        Logically collective.
2037
2038        See Also
2039        --------
2040        petsc_options, petsc.TaoLineSearchSetOptionsPrefix
2041
2042        """
2043        cdef const char *cprefix = NULL
2044        prefix = str2bytes(prefix, &cprefix)
2045        CHKERR(TaoLineSearchSetOptionsPrefix(self.taols, cprefix))
2046
2047    def getOptionsPrefix(self) -> str:
2048        """Return the prefix used for searching for options in the database.
2049
2050        Not collective.
2051
2052        See Also
2053        --------
2054        petsc_options, setOptionsPrefix, petsc.TaoLineSearchGetOptionsPrefix
2055
2056        """
2057        cdef const char *prefix = NULL
2058        CHKERR(TaoLineSearchGetOptionsPrefix(self.taols, &prefix))
2059        return bytes2str(prefix)
2060
2061    def setObjective(self, objective : TAOLSObjectiveFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2062        """Set the objective function evaluation callback.
2063
2064        Logically collective.
2065
2066        Parameters
2067        ----------
2068        objective
2069            The objective function callback.
2070        args
2071            Positional arguments for the callback.
2072        kargs
2073            Keyword arguments for the callback.
2074
2075        See Also
2076        --------
2077        setGradient, setObjectiveGradient
2078        petsc.TaoLineSearchSetObjectiveRoutine
2079
2080        """
2081        CHKERR(TaoLineSearchSetObjectiveRoutine(self.taols, TAOLS_Objective, NULL))
2082        if args is None: args = ()
2083        if kargs is None: kargs = {}
2084        self.set_attr("__objective__", (objective, args, kargs))
2085
2086    def setGradient(self, gradient: TAOLSGradientFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2087        """Set the gradient evaluation callback.
2088
2089        Logically collective.
2090
2091        Parameters
2092        ----------
2093        gradient
2094            The gradient callback.
2095        g
2096            The vector to store the gradient.
2097        args
2098            Positional arguments for the callback.
2099        kargs
2100            Keyword arguments for the callback.
2101
2102        See Also
2103        --------
2104        setObjective, setObjectiveGradient, setHessian
2105        petsc.TaoLineSearchSetGradientRoutine
2106
2107        """
2108        CHKERR(TaoLineSearchSetGradientRoutine(self.taols, TAOLS_Gradient, NULL))
2109        if args is None: args = ()
2110        if kargs is None: kargs = {}
2111        self.set_attr("__gradient__", (gradient, args, kargs))
2112
2113    def setObjectiveGradient(self, objgrad: TAOLSObjectiveGradientFunction, args: tuple[Any, ...] | None = None, kargs: dict[str, Any] | None = None) -> None:
2114        """Set the objective function and gradient evaluation callback.
2115
2116        Logically collective.
2117
2118        Parameters
2119        ----------
2120        objgrad
2121            The objective function and gradient callback.
2122        g
2123            The vector to store the gradient.
2124        args
2125            Positional arguments for the callback.
2126        kargs
2127            Keyword arguments for the callback.
2128
2129        See Also
2130        --------
2131        setObjective, setGradient, setHessian, getObjectiveAndGradient
2132        petsc.TaoLineSearchSetObjectiveAndGradientRoutine
2133
2134        """
2135        CHKERR(TaoLineSearchSetObjectiveAndGradientRoutine(self.taols, TAOLS_ObjGrad, NULL))
2136        if args is None: args = ()
2137        if kargs is None: kargs = {}
2138        self.set_attr("__objgrad__", (objgrad, args, kargs))
2139
2140    def useTAORoutine(self, TAO tao) -> None:
2141        """Use the objective and gradient evaluation routines from the given Tao object.
2142
2143        Logically collective.
2144
2145        See Also
2146        --------
2147        petsc.TaoLineSearchUseTaoRoutines
2148
2149        """
2150        CHKERR(TaoLineSearchUseTaoRoutines(self.taols, tao.tao))
2151
2152    def apply(self, Vec x, Vec g, Vec s) -> tuple[float, float, str]:
2153        """Performs a line-search in a given step direction.
2154
2155        Collective.
2156
2157        See Also
2158        --------
2159        petsc.TaoLineSearchApply
2160
2161        """
2162        cdef PetscReal f = 0
2163        cdef PetscReal steplen = 0
2164        cdef PetscTAOLineSearchConvergedReason reason = TAOLINESEARCH_CONTINUE_ITERATING
2165        CHKERR(TaoLineSearchApply(self.taols, x.vec, &f, g.vec, s.vec, &steplen, &reason))
2166        return (toReal(f), toReal(steplen), reason)
2167
2168# --------------------------------------------------------------------
2169
# Remove the enum-namespace classes from the module namespace;
# they remain accessible as TAOLineSearch.Type and TAOLineSearch.Reason.
del TAOLineSearchType
del TAOLineSearchConvergedReason
2172