# xref: /petsc/src/binding/petsc4py/test/test_tao_py.py (revision 607e733f3db3ee7f6f605a13295c517df8dbb9c9)
1import unittest
2from petsc4py import PETSc
3from sys import getrefcount
4import numpy
5
6
7# --------------------------------------------------------------------
class Objective:
    """Objective callback: f(x) = (x0 - 1)^2 + (x1 - 2)^2, minimized at (1, 2)."""

    def __call__(self, tao, x):
        dx = x[0] - 1.0
        dy = x[1] - 2.0
        return dx**2 + dy**2
11
12
class Gradient:
    """Gradient callback: writes grad f(x) into g, then assembles g."""

    def __call__(self, tao, x, g):
        g[0] = (x[0] - 1.0) * 2.0
        g[1] = (x[1] - 2.0) * 2.0
        g.assemble()
18
19
class MyTao:
    """Python-context TAO implementation that records every callback call.

    Each hook increments a per-method counter in ``self.log`` so the tests
    can verify which callbacks PETSc invoked and how often.
    """

    def __init__(self):
        self.log = {}

    def _log(self, method):
        # Count one invocation of the named callback.
        self.log[method] = self.log.get(method, 0) + 1

    def create(self, tao):
        self._log('create')
        self.testvec = PETSc.Vec()

    def destroy(self, tao):
        self._log('destroy')
        self.testvec.destroy()

    def setFromOptions(self, tao):
        self._log('setFromOptions')

    def setUp(self, tao):
        self._log('setUp')
        self.testvec = tao.getSolution().duplicate()

    def solve(self, tao):
        # Intentionally a no-op; tests clear this attribute to fall back
        # to the builtin solve loop.
        self._log('solve')

    def step(self, tao, x, g, s):
        # Steepest-descent step: s = -grad f(x).
        self._log('step')
        tao.computeGradient(x, g)
        g.copy(s)
        s.scale(-1.0)

    def preStep(self, tao):
        self._log('preStep')

    def postStep(self, tao):
        self._log('postStep')

    def monitor(self, tao):
        self._log('monitor')
60
61
class TestTaoPython(unittest.TestCase):
    """Exercise the python-type TAO solver through a MyTao context.

    Uses the dedicated assertion methods (``assertEqual``, ``assertIn``,
    ``assertNotIn``) instead of ``assertTrue(expr)`` so failures report
    the actual values involved.
    """

    def setUp(self):
        self.tao = PETSc.TAO()
        self.tao.createPython(MyTao(), comm=PETSc.COMM_SELF)
        # createPython must hold exactly one extra reference to the context.
        self.assertEqual(getrefcount(self._getCtx()), 2)
        self.assertEqual(self._getCtx().log['create'], 1)
        self.nsolve = 0

    def tearDown(self):
        self.assertEqual(getrefcount(self._getCtx()), 2)
        # destroy() must not fire while the solver is still alive.
        self.assertNotIn('destroy', self._getCtx().log)
        ctx = self._getCtx()
        self.tao.destroy()
        self.tao = None
        PETSc.garbage_cleanup()
        self.assertEqual(ctx.log['destroy'], 1)

    def testGetType(self):
        ctx = self.tao.getPythonContext()
        pytype = f'{ctx.__module__}.{type(ctx).__name__}'
        self.assertEqual(self.tao.getPythonType(), pytype)

    def testSolve(self):
        """Solve three times, peeling back one MyTao override each time."""
        tao = self.tao
        ctx = tao.getPythonContext()
        x = PETSc.Vec().create(tao.getComm())
        x.setType('standard')
        x.setSizes(2)
        y1 = x.duplicate()
        y2 = x.duplicate()
        tao.setObjective(Objective())
        tao.setGradient(Gradient(), None)
        tao.setMonitor(ctx.monitor)
        tao.setFromOptions()
        tao.setMaximumIterations(3)

        def _update(tao, it, cnt):
            # In-place add on the 0-d numpy array, visible to the caller.
            cnt += 1

        cnt_up = numpy.array(0)
        tao.setUpdate(_update, (cnt_up,))
        tao.setSolution(x)

        # Call the solve method of MyTao (a no-op): no iterations happen.
        x.set(0.5)
        tao.solve()
        n = tao.getIterationNumber()
        self.assertEqual(n, 0)

        # Call the default solve method and use step of MyTao.
        ctx.solve = None
        x.set(0.5)
        tao.solve()
        n = tao.getIterationNumber()
        self.assertGreater(tao.getConvergedReason(), 0)
        self.assertIn(n, (2, 3))
        self.assertAlmostEqual(x[0], 1.0)
        self.assertAlmostEqual(x[1], 2.0)

        # Call the default solve method with the default step method.
        ctx.step = None
        x.set(0.5)
        tao.solve()
        n = tao.getIterationNumber()
        self.assertGreater(tao.getConvergedReason(), 0)
        self.assertIn(n, (2, 3))
        self.assertAlmostEqual(x[0], 1.0)
        self.assertAlmostEqual(x[1], 2.0)

        self.assertTrue(y1.equal(y2))
        # Two converging solves ran: monitor fires once per iteration plus
        # the initial one; pre/post/step fire once per iteration.
        self.assertEqual(ctx.log['monitor'], 2 * (n + 1))
        self.assertEqual(ctx.log['preStep'], 2 * n)
        self.assertEqual(ctx.log['postStep'], 2 * n)
        self.assertEqual(ctx.log['solve'], 1)
        self.assertEqual(ctx.log['setUp'], 1)
        self.assertEqual(ctx.log['setFromOptions'], 1)
        self.assertEqual(ctx.log['step'], n)
        self.assertEqual(cnt_up, 2 * n)
        tao.cancelMonitor()

    def _getCtx(self):
        return self.tao.getPythonContext()
144
145
class MyGradientDescent:
    """Python-context TAO solver: gradient descent with a unit line search.

    Implements the full ``solve`` callback itself, so the builtin
    solve/step machinery must never run (the step hooks below raise).
    """

    def __init__(self):
        # Line search object; created in create(), released in destroy().
        self._ls = None

    def create(self, tao):
        # UNIT line search reusing the outer TAO's objective/gradient
        # routines; with initial step 0.2 it applies x <- x + 0.2*s
        # (see the "# x = x + .2 search_direction" comment in solve()).
        self._ls = PETSc.TAOLineSearch().create(comm=PETSc.COMM_SELF)
        self._ls.useTAORoutine(tao)
        self._ls.setType(PETSc.TAOLineSearch.Type.UNIT)
        self._ls.setInitialStepLength(0.2)

    def destroy(self, tao):
        self._ls.destroy()

    def setUp(self, tao):
        pass

    def solve(self, tao):
        """Iterate steepest descent until TAO reports convergence."""
        x = tao.getSolution()
        gradient = tao.getGradient()[0]
        search_direction = gradient.copy()
        for it in range(tao.getMaximumIterations()):
            tao.setIterationNumber(it)

            # search_direction = -gradient
            tao.computeGradient(x, gradient)
            gradient.copy(search_direction)
            search_direction.scale(-1)

            # x = x + .2 search_direction
            f, s, reason = self._ls.apply(x, gradient, search_direction)

            tao.monitor(f=f, res=gradient.norm())

            if reason < 0:
                raise RuntimeError('LS failed.')

            # A positive reason means converged; stop early.
            if tao.checkConverged() > 0:
                break

    def step(self, tao, x, g, s):
        # solve() is overridden above, so PETSc must never call this hook.
        raise RuntimeError('Should only be called by builtin solve.')

    def preStep(self, tao):
        raise RuntimeError('Should only be called by builtin solve.')

    def postStep(self, tao):
        raise RuntimeError('Should only be called by builtin solve.')
193
194
class TestTaoPythonOptimiser(unittest.TestCase):
    """Drive the custom gradient-descent context with options-database settings."""

    def setUp(self):
        self.tao = PETSc.TAO()
        self.tao.createPython(MyGradientDescent(), comm=PETSc.COMM_SELF)

    def tearDown(self):
        self.tao.destroy()
        self.tao = None

    def testSolve(self):
        solver = self.tao

        # Configure max iterations and gradient tolerance via the options DB.
        prefix = 'test_tao_python_optimiser_'
        opts = PETSc.Options(prefix)
        opts['tao_max_it'] = 100
        opts['tao_gatol'] = 1e-6

        solver.setOptionsPrefix(prefix)
        solver.setFromOptions()

        sol = PETSc.Vec().createSeq(2, comm=solver.getComm())
        sol.set(0.5)

        solver.setSolution(sol)
        solver.setObjective(Objective())
        solver.setGradient(Gradient(), sol.copy())

        solver.solve()

        # Options must have taken effect and the solve must have converged
        # to the known minimizer (1, 2) with objective value near zero.
        self.assertEqual(solver.getMaximumIterations(), 100)
        self.assertAlmostEqual(solver.getTolerances()[0], 1e-6)
        self.assertGreater(solver.getIterationNumber(), 0)
        self.assertGreater(solver.getConvergedReason(), 0)
        self.assertAlmostEqual(sol[0], 1.0, places=5)
        self.assertAlmostEqual(sol[1], 2.0, places=5)
        self.assertGreater(solver.getObjectiveValue(), 0)
        self.assertAlmostEqual(solver.getObjectiveValue(), 0, places=5)
231
232
233# --------------------------------------------------------------------
234
# These tests assume real arithmetic (the target minimizer and tolerances
# are real); when PETSc is built with complex scalars, remove the test
# classes so unittest discovery skips them.
if numpy.iscomplexobj(PETSc.ScalarType()):
    del TestTaoPython
    del TestTaoPythonOptimiser

if __name__ == '__main__':
    unittest.main()
241
242# --------------------------------------------------------------------
243