xref: /petsc/src/binding/petsc4py/test/test_dmplex.py (revision 0aed07b9805c9616851d69fcfe7cc3cc71b23e26)
import filecmp
import functools
import os
import unittest

import numpy as np

import petsc4py
from petsc4py import PETSc
7
8# --------------------------------------------------------------------
9
10ERR_SUP = 56
11
12class BaseTestPlex(object):
13
14    COMM = PETSc.COMM_WORLD
15    DIM = 1
16    CELLS = [[0, 1], [1, 2]]
17    COORDS = [[0.], [0.5], [1.]]
18    COMP = 1
19    DOFS = [1, 0]
20
21    def setUp(self):
22        self.plex = PETSc.DMPlex().createFromCellList(self.DIM,
23                                                      self.CELLS,
24                                                      self.COORDS,
25                                                      comm=self.COMM)
26
27    def tearDown(self):
28        self.plex.destroy()
29        self.plex = None
30
31    def testTopology(self):
32        rank = self.COMM.rank
33        dim = self.plex.getDimension()
34        pStart, pEnd = self.plex.getChart()
35        cStart, cEnd = self.plex.getHeightStratum(0)
36        vStart, vEnd = self.plex.getDepthStratum(0)
37        numDepths = self.plex.getLabelSize("depth")
38        coords_raw = self.plex.getCoordinates().getArray()
39        coords = np.reshape(coords_raw, (vEnd - vStart, dim))
40        self.assertEqual(dim, self.DIM)
41        self.assertEqual(numDepths, self.DIM+1)
42        if rank == 0 and self.CELLS is not None:
43            self.assertEqual(cEnd-cStart, len(self.CELLS))
44        if rank == 0 and self.COORDS is not None:
45            self.assertEqual(vEnd-vStart, len(self.COORDS))
46            self.assertTrue((coords == self.COORDS).all())
47
48    def testClosure(self):
49        pStart, pEnd = self.plex.getChart()
50        for p in range(pStart, pEnd):
51            closure = self.plex.getTransitiveClosure(p)[0]
52            for c in closure:
53                cone = self.plex.getCone(c)
54                self.assertEqual(self.plex.getConeSize(c), len(cone))
55                for i in cone:
56                    self.assertIn(i, closure)
57            star = self.plex.getTransitiveClosure(p, useCone=False)[0]
58            for s in star:
59                support = self.plex.getSupport(s)
60                self.assertEqual(self.plex.getSupportSize(s), len(support))
61                for i in support:
62                    self.assertIn(i, star)
63
64    def testAdjacency(self):
65        PETSc.DMPlex.setAdjacencyUseAnchors(self.plex, False)
66        flag = PETSc.DMPlex.getAdjacencyUseAnchors(self.plex)
67        self.assertFalse(flag)
68        PETSc.DMPlex.setAdjacencyUseAnchors(self.plex, True)
69        flag = PETSc.DMPlex.getAdjacencyUseAnchors(self.plex)
70        self.assertTrue(flag)
71        PETSc.DMPlex.setBasicAdjacency(self.plex, False, False)
72        flagA, flagB = PETSc.DMPlex.getBasicAdjacency(self.plex)
73        self.assertFalse(flagA)
74        self.assertFalse(flagB)
75        PETSc.DMPlex.setBasicAdjacency(self.plex, True, True)
76        flagA, flagB = PETSc.DMPlex.getBasicAdjacency(self.plex)
77        self.assertTrue(flagA)
78        self.assertTrue(flagB)
79        pStart, pEnd = self.plex.getChart()
80        for p in range(pStart, pEnd):
81            adjacency = self.plex.getAdjacency(p)
82            self.assertTrue(p in adjacency)
83            self.assertTrue(len(adjacency) > 1)
84
85    def testSectionDofs(self):
86        self.plex.setNumFields(1)
87        section = self.plex.createSection([self.COMP], [self.DOFS])
88        size = section.getStorageSize()
89        entity_dofs = [self.plex.getStratumSize("depth", d) *
90                       self.DOFS[d] for d in range(self.DIM+1)]
91        self.assertEqual(sum(entity_dofs), size)
92
93    def testSectionClosure(self):
94        section = self.plex.createSection([self.COMP], [self.DOFS])
95        self.plex.setSection(section)
96        vec = self.plex.createLocalVec()
97        pStart, pEnd = self.plex.getChart()
98        for p in range(pStart, pEnd):
99            for i in range(section.getDof(p)):
100                off = section.getOffset(p)
101                vec.setValue(off+i, p)
102
103        for p in range(pStart, pEnd):
104            point_closure = self.plex.getTransitiveClosure(p)[0]
105            dof_closure = self.plex.vecGetClosure(section, vec, p)
106            for p in dof_closure:
107                self.assertIn(p, point_closure)
108
109    def testBoundaryLabel(self):
110        pStart, pEnd = self.plex.getChart()
111        if (pEnd - pStart == 0): return
112
113        self.assertFalse(self.plex.hasLabel("boundary"))
114        self.plex.markBoundaryFaces("boundary")
115        self.assertTrue(self.plex.hasLabel("boundary"))
116
117        faces = self.plex.getStratumIS("boundary", 1)
118        for f in faces.getIndices():
119            points, orient = self.plex.getTransitiveClosure(f, useCone=True)
120            for p in points:
121                self.plex.setLabelValue("boundary", p, 1)
122
123        for p in range(pStart, pEnd):
124            if self.plex.getLabelValue("boundary", p) != 1:
125                self.plex.setLabelValue("boundary", p, 2)
126
127        numBoundary = self.plex.getStratumSize("boundary", 1)
128        numInterior = self.plex.getStratumSize("boundary", 2)
129        self.assertNotEqual(numBoundary, pEnd - pStart)
130        self.assertNotEqual(numInterior, pEnd - pStart)
131        self.assertEqual(numBoundary + numInterior, pEnd - pStart)
132
133
134    def testAdapt(self):
135        dim = self.plex.getDimension()
136        if dim == 1: return
137        vStart, vEnd = self.plex.getDepthStratum(0)
138        numVertices = vEnd-vStart
139        metric_array = np.zeros([numVertices,dim,dim],dtype=PETSc.ScalarType)
140        for met in metric_array:
141            met[:,:] = np.diag([9]*dim)
142        metric = PETSc.Vec().createWithArray(metric_array)
143        try:
144            newplex = self.plex.adaptMetric(metric,"")
145        except PETSc.Error as exc:
146            if exc.ierr != ERR_SUP: raise
147
148
149# --------------------------------------------------------------------
150
151class BaseTestPlex_2D(BaseTestPlex):
152    DIM = 2
153    CELLS = [[0, 1, 3], [1, 3, 4], [1, 2, 4], [2, 4, 5],
154             [3, 4, 6], [4, 6, 7], [4, 5, 7], [5, 7, 8]]
155    COORDS = [[0.0, 0.0], [0.5, 0.0], [1.0, 0.0],
156              [0.0, 0.5], [0.5, 0.5], [1.0, 0.5],
157              [0.0, 1.0], [0.5, 1.0], [1.0, 1.0]]
158    DOFS = [1, 0, 0]
159
160class BaseTestPlex_3D(BaseTestPlex):
161    DIM = 3
162    CELLS = [[0, 2, 3, 7], [0, 2, 6, 7], [0, 4, 6, 7],
163             [0, 1, 3, 7], [0, 1, 5, 7], [0, 4, 5, 7]]
164    COORDS = [[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.],
165              [0., 0., 1.], [1., 0., 1.], [0., 1., 1.], [1., 1., 1.]]
166    DOFS = [1, 0, 0, 0]
167
168# --------------------------------------------------------------------
169
170class TestPlex_1D(BaseTestPlex, unittest.TestCase):
171    pass
172
173class TestPlex_2D(BaseTestPlex_2D, unittest.TestCase):
174    pass
175
176class TestPlex_3D(BaseTestPlex_3D, unittest.TestCase):
177    pass
178
179class TestPlex_2D_P3(BaseTestPlex_2D, unittest.TestCase):
180    DOFS = [1, 2, 1]
181
182class TestPlex_3D_P3(BaseTestPlex_3D, unittest.TestCase):
183    DOFS = [1, 2, 1, 0]
184
185class TestPlex_3D_P4(BaseTestPlex_3D, unittest.TestCase):
186    DOFS = [1, 3, 3, 1]
187
188class TestPlex_2D_BoxTensor(BaseTestPlex_2D, unittest.TestCase):
189    CELLS = None
190    COORDS = None
191    def setUp(self):
192        self.plex = PETSc.DMPlex().createBoxMesh([3,3], simplex=False)
193
194class TestPlex_3D_BoxTensor(BaseTestPlex_3D, unittest.TestCase):
195    CELLS = None
196    COORDS = None
197    def setUp(self):
198        self.plex = PETSc.DMPlex().createBoxMesh([3,3,3], simplex=False)
199
200try:
201    raise PETSc.Error
202    PETSc.DMPlex().createBoxMesh([2,2], simplex=True, comm=PETSc.COMM_SELF).destroy()
203except PETSc.Error:
204    pass
205else:
206    class TestPlex_2D_Box(BaseTestPlex_2D, unittest.TestCase):
207        CELLS = None
208        COORDS = None
209        def setUp(self):
210            self.plex = PETSc.DMPlex().createBoxMesh([1,1], simplex=True)
211
212    class TestPlex_2D_Boundary(BaseTestPlex_2D, unittest.TestCase):
213        CELLS = None
214        COORDS = None
215        def setUp(self):
216            boundary = PETSc.DMPlex().create(self.COMM)
217            boundary.createSquareBoundary([0., 0.], [1., 1.], [2, 2])
218            boundary.setDimension(self.DIM-1)
219            self.plex = PETSc.DMPlex().generate(boundary)
220
221    class TestPlex_3D_Box(BaseTestPlex_3D, unittest.TestCase):
222        CELLS = None
223        COORDS = None
224        def setUp(self):
225            self.plex = PETSc.DMPlex().createBoxMesh([1,1,1], simplex=True)
226
227    class TestPlex_3D_Boundary(BaseTestPlex_3D, unittest.TestCase):
228        CELLS = None
229        COORDS = None
230        def setUp(self):
231            boundary = PETSc.DMPlex().create(self.COMM)
232            boundary.createCubeBoundary([0., 0., 0.], [1., 1., 1.], [1, 1, 1])
233            boundary.setDimension(self.DIM-1)
234            self.plex = PETSc.DMPlex().generate(boundary)
235
236# --------------------------------------------------------------------
237
238PETSC_DIR = petsc4py.get_config()['PETSC_DIR']
239
240def check_dtype(method):
241    def wrapper(self, *args, **kwargs):
242        if PETSc.ScalarType is PETSc.ComplexType:
243            return
244        else:
245            return method(self, *args, **kwargs)
246    return wrapper
247
248def check_package(method):
249    def wrapper(self, *args, **kwargs):
250        if not PETSc.Sys.hasExternalPackage("hdf5"):
251            return
252        elif self.PARTITIONERTYPE != "simple" and \
253           not PETSc.Sys.hasExternalPackage(self.PARTITIONERTYPE):
254            return
255        else:
256            return method(self, *args, **kwargs)
257    return wrapper
258
259def check_nsize(method):
260    def wrapper(self, *args, **kwargs):
261        if PETSc.COMM_WORLD.size != self.NSIZE:
262            return
263        else:
264            return method(self, *args, **kwargs)
265    return wrapper
266
267class BaseTestPlexHDF5(object):
268    NSIZE = 4
269    NTIMES = 3
270
271    def setUp(self):
272        self.txtvwr = PETSc.Viewer()
273
274    def tearDown(self):
275        if not PETSc.COMM_WORLD.rank:
276            if os.path.exists(self.outfile()):
277                os.remove(self.outfile())
278            if os.path.exists(self.tmp_output_file()):
279                os.remove(self.tmp_output_file())
280        self.txtvwr = None
281
282    def _name(self):
283        return "%s_outformat-%s_%s" % (self.SUFFIX,
284                                       self.OUTFORMAT,
285                                       self.PARTITIONERTYPE)
286
287    def infile(self):
288        return os.path.join(PETSC_DIR, "share/petsc/datafiles/",
289                            "meshes/blockcylinder-50.h5")
290
291    def outfile(self):
292        return os.path.join("./temp_test_dmplex_%s.h5" % self._name())
293
294    def informat(self):
295        return PETSc.Viewer.Format.HDF5_XDMF
296
297    def outformat(self):
298        d = {"hdf5_petsc": PETSc.Viewer.Format.HDF5_PETSC,
299             "hdf5_xdmf": PETSc.Viewer.Format.HDF5_XDMF}
300        return d[self.OUTFORMAT]
301
302    def partitionerType(self):
303        d = {"simple": PETSc.Partitioner.Type.SIMPLE,
304             "ptscotch": PETSc.Partitioner.Type.PTSCOTCH,
305             "parmetis": PETSc.Partitioner.Type.PARMETIS}
306        return d[self.PARTITIONERTYPE]
307
308    def ref_output_file(self):
309        return os.path.join(PETSC_DIR, "src/dm/impls/plex/tutorials/",
310                            "output/ex5_%s.out" % self._name())
311
312    def tmp_output_file(self):
313        return os.path.join("./temp_test_dmplex_%s.out" % self._name())
314
315    def outputText(self, msg, comm):
316        if not comm.rank:
317            with open(self.tmp_output_file(), 'a') as f:
318                f.write(msg)
319
320    def outputPlex(self, plex):
321        self.txtvwr.createASCII(self.tmp_output_file(),
322                                mode='a', comm=plex.comm)
323        plex.view(viewer=self.txtvwr)
324        self.txtvwr.destroy()
325
326    @check_dtype
327    @check_package
328    @check_nsize
329    def testViewLoadCycle(self):
330        grank = PETSc.COMM_WORLD.rank
331        for i in range(self.NTIMES):
332            if i == 0:
333                infname = self.infile()
334                informt = self.informat()
335            else:
336                infname = self.outfile()
337                informt = self.outformat()
338            if self.HETEROGENEOUS:
339                mycolor = (grank > self.NTIMES - i)
340            else:
341                mycolor = 0
342            try:
343                import mpi4py
344            except ImportError:
345                self.skipTest('mpi4py') # throws special exception to signal test skip
346            mpicomm = PETSc.COMM_WORLD.tompi4py()
347            comm = PETSc.Comm(comm=mpicomm.Split(color=mycolor, key=grank))
348            if mycolor == 0:
349                self.outputText("Begin cycle %d\n" % i, comm)
350                plex = PETSc.DMPlex()
351                vwr = PETSc.ViewerHDF5()
352                # Create plex
353                plex.create(comm=comm)
354                plex.setName("DMPlex Object")
355                # Load data from XDMF into dm in parallel
356                vwr.create(infname, mode='r', comm=comm)
357                vwr.pushFormat(format=informt)
358                plex.load(viewer=vwr)
359                plex.setOptionsPrefix("loaded_")
360                plex.setFromOptions()
361                vwr.popFormat()
362                vwr.destroy()
363                self.outputPlex(plex)
364                # Test DM is indeed distributed
365                flg = plex.isDistributed()
366                self.outputText("Loaded mesh distributed? %s\n" %
367                                str(flg).upper(), comm)
368                # Interpolate
369                plex.interpolate()
370                plex.setOptionsPrefix("interpolated_")
371                plex.setFromOptions()
372                self.outputPlex(plex)
373                # Redistribute
374                part = plex.getPartitioner()
375                part.setType(self.partitionerType())
376                _ = plex.distribute(overlap=0)
377                plex.setOptionsPrefix("redistributed_")
378                plex.setFromOptions()
379                self.outputPlex(plex)
380                # Save redistributed dm to XDMF in parallel
381                vwr.create(self.outfile(), mode='w', comm=comm)
382                vwr.pushFormat(format=self.outformat())
383                plex.view(viewer=vwr)
384                vwr.popFormat()
385                vwr.destroy()
386                # Destroy plex
387                plex.destroy()
388                self.outputText("End   cycle %d\n--------\n" % i, comm)
389            PETSc.COMM_WORLD.Barrier()
390        # Check that the output is identical to that of plex/tutorial/ex5.c.
391        self.assertTrue(filecmp.cmp(self.tmp_output_file(),
392                                    self.ref_output_file(), shallow=False),
393                        'Contents of the files not the same.')
394        PETSc.COMM_WORLD.Barrier()
395
396class BaseTestPlexHDF5Homogeneous(BaseTestPlexHDF5):
397    """Test save on N / load on N."""
398    SUFFIX = 0
399    HETEROGENEOUS = False
400
401class BaseTestPlexHDF5Heterogeneous(BaseTestPlexHDF5):
402    """Test save on N / load on M."""
403    SUFFIX = 1
404    HETEROGENEOUS = True
405
406class TestPlexHDF5PETSCSimpleHomogeneous(BaseTestPlexHDF5Homogeneous,
407                                         unittest.TestCase):
408    OUTFORMAT = "hdf5_petsc"
409    PARTITIONERTYPE = "simple"
410
411"""
412Skipping. PTScotch produces different distributions when run
413in a sequence in a single session.
414
415class TestPlexHDF5PETSCPTScotchHomogeneous(BaseTestPlexHDF5Homogeneous,
416                                           unittest.TestCase):
417    OUTFORMAT = "hdf5_petsc"
418    PARTITIONERTYPE = "ptscotch"
419"""
420
421class TestPlexHDF5PETSCParmetisHomogeneous(BaseTestPlexHDF5Homogeneous,
422                                           unittest.TestCase):
423    OUTFORMAT = "hdf5_petsc"
424    PARTITIONERTYPE = "parmetis"
425
426class TestPlexHDF5XDMFSimpleHomogeneous(BaseTestPlexHDF5Homogeneous,
427                                        unittest.TestCase):
428    OUTFORMAT = "hdf5_xdmf"
429    PARTITIONERTYPE = "simple"
430
431"""
432Skipping. PTScotch produces different distributions when run
433in a sequence in a single session.
434
435class TestPlexHDF5XDMFPTScotchHomogeneous(BaseTestPlexHDF5Homogeneous,
436                                          unittest.TestCase):
437    OUTFORMAT = "hdf5_xdmf"
438    PARTITIONERTYPE = "ptscotch"
439"""
440
441class TestPlexHDF5XDMFParmetisHomogeneous(BaseTestPlexHDF5Homogeneous,
442                                          unittest.TestCase):
443    OUTFORMAT = "hdf5_xdmf"
444    PARTITIONERTYPE = "parmetis"
445
446class TestPlexHDF5PETSCSimpleHeterogeneous(BaseTestPlexHDF5Heterogeneous,
447                                           unittest.TestCase):
448    OUTFORMAT = "hdf5_petsc"
449    PARTITIONERTYPE = "simple"
450
451"""
452Skipping. PTScotch produces different distributions when run
453in a sequence in a single session.
454
455class TestPlexHDF5PETSCPTScotchHeterogeneous(BaseTestPlexHDF5Heterogeneous,
456                                             unittest.TestCase):
457    OUTFORMAT = "hdf5_petsc"
458    PARTITIONERTYPE = "ptscotch"
459"""
460
461class TestPlexHDF5PETSCParmetisHeterogeneous(BaseTestPlexHDF5Heterogeneous,
462                                             unittest.TestCase):
463    OUTFORMAT = "hdf5_petsc"
464    PARTITIONERTYPE = "parmetis"
465
466class TestPlexHDF5XDMFSimpleHeterogeneous(BaseTestPlexHDF5Heterogeneous,
467                                          unittest.TestCase):
468    OUTFORMAT = "hdf5_xdmf"
469    PARTITIONERTYPE = "simple"
470
471class TestPlexHDF5XDMFPTScotchHeterogeneous(BaseTestPlexHDF5Heterogeneous,
472                                            unittest.TestCase):
473    OUTFORMAT = "hdf5_xdmf"
474    PARTITIONERTYPE = "ptscotch"
475
476class TestPlexHDF5XDMFParmetisHeterogeneous(BaseTestPlexHDF5Heterogeneous,
477                                            unittest.TestCase):
478    OUTFORMAT = "hdf5_xdmf"
479    PARTITIONERTYPE = "parmetis"
480
481# --------------------------------------------------------------------
482
483if __name__ == '__main__':
484    unittest.main()
485