xref: /petsc/src/dm/impls/plex/tests/ex55.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
1 static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscviewerhdf5.h>
5 #include <petscsf.h>
6 
/* Run-time options for the HDF5/ExodusII save-load round-trip test,
   filled in by ProcessOptions() and consumed by main() and
   DMPlexWriteAndReadHDF5(). */
typedef struct {
  PetscBool compare;                      /* Compare the meshes using DMPlexEqual() */
  PetscBool compare_labels;               /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool distribute;                   /* Distribute the mesh */
  PetscBool interpolate;                  /* Generate intermediate mesh elements */
  char      filename[PETSC_MAX_PATH_LEN]; /* Mesh filename */
  char      meshname[PETSC_MAX_PATH_LEN]; /* Mesh name */
  PetscViewerFormat format;               /* Format to write and read */
  PetscBool second_write_read;            /* Write and read for the 2nd time */
  PetscBool use_low_level_functions;      /* Use low level functions for viewing and loading */
} AppCtx;
18 
19 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
20 {
21   PetscFunctionBeginUser;
22   options->compare = PETSC_FALSE;
23   options->compare_labels = PETSC_FALSE;
24   options->distribute = PETSC_TRUE;
25   options->interpolate = PETSC_FALSE;
26   options->filename[0] = '\0';
27   options->meshname[0] = '\0';
28   options->format = PETSC_VIEWER_DEFAULT;
29   options->second_write_read = PETSC_FALSE;
30   options->use_low_level_functions = PETSC_FALSE;
31 
32   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
33   PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
34   PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
35   PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
36   PetscCall(PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex55.c", options->interpolate, &options->interpolate, NULL));
37   PetscCall(PetscOptionsString("-filename", "The mesh file", "ex55.c", options->filename, options->filename, sizeof(options->filename), NULL));
38   PetscCall(PetscOptionsString("-meshname", "The mesh file", "ex55.c", options->meshname, options->meshname, sizeof(options->meshname), NULL));
39   PetscCall(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum*)&options->format, NULL));
40   PetscCall(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
41   PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
42   PetscOptionsEnd();
43   PetscFunctionReturn(0);
44 };
45 
46 static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], const char prefix[], AppCtx user, DM *dm_new)
47 {
48   DM             dmnew;
49   const char     savedName[]  = "Mesh";
50   const char     loadedName[] = "Mesh_new";
51   PetscViewer    v;
52 
53   PetscFunctionBeginUser;
54   PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), filename, FILE_MODE_WRITE, &v));
55   PetscCall(PetscViewerPushFormat(v, user.format));
56   PetscCall(PetscObjectSetName((PetscObject) dm, savedName));
57   if (user.use_low_level_functions) {
58     PetscCall(DMPlexTopologyView(dm, v));
59     PetscCall(DMPlexCoordinatesView(dm, v));
60     PetscCall(DMPlexLabelsView(dm, v));
61   } else {
62     PetscCall(DMView(dm, v));
63   }
64 
65   PetscCall(PetscViewerFileSetMode(v, FILE_MODE_READ));
66   PetscCall(DMCreate(PETSC_COMM_WORLD, &dmnew));
67   PetscCall(DMSetType(dmnew, DMPLEX));
68   PetscCall(PetscObjectSetName((PetscObject) dmnew, savedName));
69   PetscCall(DMSetOptionsPrefix(dmnew, prefix));
70   if (user.use_low_level_functions) {
71     PetscSF  sfXC;
72 
73     PetscCall(DMPlexTopologyLoad(dmnew, v, &sfXC));
74     PetscCall(DMPlexCoordinatesLoad(dmnew, v, sfXC));
75     PetscCall(DMPlexLabelsLoad(dmnew, v, sfXC));
76     PetscCall(PetscSFDestroy(&sfXC));
77   } else {
78     PetscCall(DMLoad(dmnew, v));
79   }
80   PetscCall(PetscObjectSetName((PetscObject)dmnew,loadedName));
81 
82   PetscCall(PetscViewerPopFormat(v));
83   PetscCall(PetscViewerDestroy(&v));
84   *dm_new = dmnew;
85   PetscFunctionReturn(0);
86 }
87 
int main(int argc, char **argv)
{
  DM             dm, dmnew;
  PetscPartitioner part;
  AppCtx         user;
  PetscBool      flg;

  PetscCall(PetscInitialize(&argc, &argv, NULL,help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  /* Create the original mesh from -filename and view it under the orig_ prefix */
  PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, user.meshname, user.interpolate, &dm));
  PetscCall(DMSetOptionsPrefix(dm,"orig_"));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  /* Optionally distribute; DMPlexDistribute() returns NULL when nothing was moved
     (e.g. single rank), in which case the original dm is kept */
  if (user.distribute) {
    DM dmdist;

    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
    if (dmdist) {
      PetscCall(DMDestroy(&dm));
      dm   = dmdist;
    }
  }

  /* Clear the prefix and disable auto-distribution before the options pass,
     so -dm_view below shows the (possibly distributed) mesh unchanged */
  PetscCall(DMSetOptionsPrefix(dm,NULL));
  PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  /* First write/read round trip; the loaded mesh uses the new_ prefix */
  PetscCall(DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew));

  /* Optionally round-trip a second time, starting from the first loaded mesh */
  if (user.second_write_read) {
    PetscCall(DMDestroy(&dm));
    dm = dmnew;
    PetscCall(DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew));
  }

  PetscCall(DMViewFromOptions(dmnew, NULL, "-dm_view"));
  /* TODO: Is it still true? */
  /* The NATIVE format for coordinate viewing is killing parallel output, since we have a local vector. Map it to global, and it will work. */

  /* This currently makes sense only for sequential meshes. */
  if (user.compare) {
    PetscCall(DMPlexEqual(dmnew, dm, &flg));
    PetscCheck(flg,PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"DMs equal\n"));
  }
  if (user.compare_labels) {
    PetscCall(DMCompareLabels(dmnew, dm, NULL, NULL));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD,"DMLabels equal\n"));
  }

  PetscCall(DMDestroy(&dm));
  PetscCall(DMDestroy(&dmnew));
  PetscCall(PetscFinalize());
  return 0;
}
146 
147 /*TEST
148   build:
149     requires: hdf5
150   # Idempotence of saving/loading
151   #   Have to replace Exodus file, which is creating uninterpolated edges
152   test:
153     suffix: 0
154     TODO: broken
155     requires: exodusii
156     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
157     args: -format hdf5_petsc -compare
158   test:
159     suffix: 1
160     TODO: broken
161     requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
162     nsize: 2
163     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
164     args: -petscpartitioner_type parmetis
165     args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail
166   testset:
167     requires: exodusii
168     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
169     args: -petscpartitioner_type simple
170     args: -dm_view ascii::ascii_info_detail
171     args: -new_dm_view ascii::ascii_info_detail
172     test:
173       suffix: 2
174       nsize: {{1 2 4 8}separate output}
175       args: -format {{default hdf5_petsc}separate output}
176       args: -interpolate {{0 1}separate output}
177     test:
178       suffix: 2a
179       nsize: {{1 2 4 8}separate output}
180       args: -format {{hdf5_xdmf hdf5_viz}separate output}
181   test:
182     suffix: 3
183     requires: exodusii
184     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
185 
186   # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
187   testset:
188     suffix: 4
189     requires: !complex
190     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
191     args: -distribute 0 -second_write_read -compare
192     test:
193       suffix: hdf5_petsc
194       nsize: {{1 2}}
195       args: -format hdf5_petsc -compare_labels
196     test:
197       suffix: hdf5_xdmf
198       nsize: {{1 3 8}}
199       args: -format hdf5_xdmf
200 
201   # Use low level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
202   # Output must be the same as ex55_2_nsize-2_format-hdf5_petsc_interpolate-0.out
203   test:
204     suffix: 5
205     requires: exodusii
206     nsize: 2
207     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
208     args: -petscpartitioner_type simple
209     args: -dm_view ascii::ascii_info_detail
210     args: -new_dm_view ascii::ascii_info_detail
211     args: -format hdf5_petsc -use_low_level_functions
212 
213   testset:
214     suffix: 6
215     requires: hdf5 !complex datafilespath
216     nsize: {{1 3}}
217     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
218     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
219     args: -format hdf5_petsc -second_write_read -compare -compare_labels
220     args: -interpolate {{0 1}} -distribute {{0 1}} -petscpartitioner_type simple
221 
222   testset:
223     # the same data and settings as dm_impls_plex_tests-ex18_9%
224     suffix: 9
225     requires: hdf5 !complex datafilespath
226     nsize: {{1 2 4}}
227     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
228     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
229     args: -format hdf5_xdmf -second_write_read -compare
230     test:
231       suffix: hdf5_seqload
232       args: -distribute -petscpartitioner_type simple
233       args: -interpolate {{0 1}}
234       args: -dm_plex_hdf5_force_sequential
235     test:
236       suffix: hdf5_seqload_metis
237       requires: parmetis
238       args: -distribute -petscpartitioner_type parmetis
239       args: -interpolate 1
240       args: -dm_plex_hdf5_force_sequential
241     test:
242       suffix: hdf5
243       args: -interpolate 1 -petscpartitioner_type simple
244     test:
245       suffix: hdf5_repart
246       requires: parmetis
247       args: -distribute -petscpartitioner_type parmetis
248       args: -interpolate 1
249     test:
250       TODO: Parallel partitioning of uninterpolated meshes not supported
251       suffix: hdf5_repart_ppu
252       requires: parmetis
253       args: -distribute -petscpartitioner_type parmetis
254       args: -interpolate 0
255 
256   # reproduce PetscSFView() crash - fixed, left as regression test
257   test:
258     suffix: new_dm_view
259     requires: exodusii
260     nsize: 2
261     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
262 
263   # test backward compatibility of petsc_hdf5 format
264   testset:
265     suffix: 10-v3.16.0-v1.0.0
266     requires: hdf5 !complex datafilespath
267     args: -dm_plex_check_all -compare -compare_labels
268     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0}} -use_low_level_functions {{0 1}}
269     test:
270       suffix: a
271       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
272     test:
273       suffix: b
274       TODO: broken
275       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
276     test:
277       suffix: c
278       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
279     test:
280       suffix: d
281       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
282     test:
283       suffix: e
284       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
285     test:
286       suffix: f
287       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5
288 
289 TEST*/
290