xref: /petsc/src/dm/impls/plex/tests/ex55.c (revision b122ec5aa1bd4469eb4e0673542fb7de3f411254)
/* Help text printed by -help; this example exercises mesh/field save+load via HDF5 and ExodusII. */
static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscviewerhdf5.h>
5 #include <petscsf.h>
6 
/* Command-line options controlling the save/load round-trip test (set in ProcessOptions()). */
typedef struct {
  PetscBool compare;                      /* Compare the meshes using DMPlexEqual() */
  PetscBool compare_labels;               /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool distribute;                   /* Distribute the mesh before writing */
  PetscBool interpolate;                  /* Generate intermediate mesh elements (edges/faces) on load */
  char      filename[PETSC_MAX_PATH_LEN]; /* Mesh filename */
  char      meshname[PETSC_MAX_PATH_LEN]; /* Mesh name */
  PetscViewerFormat format;               /* Viewer format used for both write and read */
  PetscBool second_write_read;            /* Write and read for the 2nd time */
  PetscBool use_low_level_functions;      /* Use low level functions (DMPlex*View/Load) instead of DMView/DMLoad */
} AppCtx;
18 
19 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
20 {
21   PetscErrorCode ierr;
22 
23   PetscFunctionBeginUser;
24   options->compare = PETSC_FALSE;
25   options->compare_labels = PETSC_FALSE;
26   options->distribute = PETSC_TRUE;
27   options->interpolate = PETSC_FALSE;
28   options->filename[0] = '\0';
29   options->meshname[0] = '\0';
30   options->format = PETSC_VIEWER_DEFAULT;
31   options->second_write_read = PETSC_FALSE;
32   options->use_low_level_functions = PETSC_FALSE;
33 
34   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
35   CHKERRQ(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
36   CHKERRQ(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
37   CHKERRQ(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
38   CHKERRQ(PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex55.c", options->interpolate, &options->interpolate, NULL));
39   CHKERRQ(PetscOptionsString("-filename", "The mesh file", "ex55.c", options->filename, options->filename, sizeof(options->filename), NULL));
40   CHKERRQ(PetscOptionsString("-meshname", "The mesh file", "ex55.c", options->meshname, options->meshname, sizeof(options->meshname), NULL));
41   CHKERRQ(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum*)&options->format, NULL));
42   CHKERRQ(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
43   CHKERRQ(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
44   ierr = PetscOptionsEnd();CHKERRQ(ierr);
45   PetscFunctionReturn(0);
46 };
47 
48 static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], const char prefix[], AppCtx user, DM *dm_new)
49 {
50   DM             dmnew;
51   const char     savedName[]  = "Mesh";
52   const char     loadedName[] = "Mesh_new";
53   PetscViewer    v;
54 
55   PetscFunctionBeginUser;
56   CHKERRQ(PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), filename, FILE_MODE_WRITE, &v));
57   CHKERRQ(PetscViewerPushFormat(v, user.format));
58   CHKERRQ(PetscObjectSetName((PetscObject) dm, savedName));
59   if (user.use_low_level_functions) {
60     CHKERRQ(DMPlexTopologyView(dm, v));
61     CHKERRQ(DMPlexCoordinatesView(dm, v));
62     CHKERRQ(DMPlexLabelsView(dm, v));
63   } else {
64     CHKERRQ(DMView(dm, v));
65   }
66 
67   CHKERRQ(PetscViewerFileSetMode(v, FILE_MODE_READ));
68   CHKERRQ(DMCreate(PETSC_COMM_WORLD, &dmnew));
69   CHKERRQ(DMSetType(dmnew, DMPLEX));
70   CHKERRQ(PetscObjectSetName((PetscObject) dmnew, savedName));
71   CHKERRQ(DMSetOptionsPrefix(dmnew, prefix));
72   if (user.use_low_level_functions) {
73     PetscSF  sfXC;
74 
75     CHKERRQ(DMPlexTopologyLoad(dmnew, v, &sfXC));
76     CHKERRQ(DMPlexCoordinatesLoad(dmnew, v, sfXC));
77     CHKERRQ(DMPlexLabelsLoad(dmnew, v, sfXC));
78     CHKERRQ(PetscSFDestroy(&sfXC));
79   } else {
80     CHKERRQ(DMLoad(dmnew, v));
81   }
82   CHKERRQ(PetscObjectSetName((PetscObject)dmnew,loadedName));
83 
84   CHKERRQ(PetscViewerPopFormat(v));
85   CHKERRQ(PetscViewerDestroy(&v));
86   *dm_new = dmnew;
87   PetscFunctionReturn(0);
88 }
89 
90 int main(int argc, char **argv)
91 {
92   DM             dm, dmnew;
93   PetscPartitioner part;
94   AppCtx         user;
95   PetscBool      flg;
96 
97   CHKERRQ(PetscInitialize(&argc, &argv, NULL,help));
98   CHKERRQ(ProcessOptions(PETSC_COMM_WORLD, &user));
99   CHKERRQ(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, user.meshname, user.interpolate, &dm));
100   CHKERRQ(DMSetOptionsPrefix(dm,"orig_"));
101   CHKERRQ(DMViewFromOptions(dm, NULL, "-dm_view"));
102 
103   if (user.distribute) {
104     DM dmdist;
105 
106     CHKERRQ(DMPlexGetPartitioner(dm, &part));
107     CHKERRQ(PetscPartitionerSetFromOptions(part));
108     CHKERRQ(DMPlexDistribute(dm, 0, NULL, &dmdist));
109     if (dmdist) {
110       CHKERRQ(DMDestroy(&dm));
111       dm   = dmdist;
112     }
113   }
114 
115   CHKERRQ(DMSetOptionsPrefix(dm,NULL));
116   CHKERRQ(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
117   CHKERRQ(DMSetFromOptions(dm));
118   CHKERRQ(DMViewFromOptions(dm, NULL, "-dm_view"));
119 
120   CHKERRQ(DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew));
121 
122   if (user.second_write_read) {
123     CHKERRQ(DMDestroy(&dm));
124     dm = dmnew;
125     CHKERRQ(DMPlexWriteAndReadHDF5(dm, "dmdist.h5", "new_", user, &dmnew));
126   }
127 
128   CHKERRQ(DMViewFromOptions(dmnew, NULL, "-dm_view"));
129   /* TODO: Is it still true? */
130   /* The NATIVE format for coordiante viewing is killing parallel output, since we have a local vector. Map it to global, and it will work. */
131 
132   /* This currently makes sense only for sequential meshes. */
133   if (user.compare) {
134     CHKERRQ(DMPlexEqual(dmnew, dm, &flg));
135     PetscCheck(flg,PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
136     CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"DMs equal\n"));
137   }
138   if (user.compare_labels) {
139     CHKERRQ(DMCompareLabels(dmnew, dm, NULL, NULL));
140     CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"DMLabels equal\n"));
141   }
142 
143   CHKERRQ(DMDestroy(&dm));
144   CHKERRQ(DMDestroy(&dmnew));
145   CHKERRQ(PetscFinalize());
146   return 0;
147 }
148 
149 /*TEST
150   build:
151     requires: hdf5
152   # Idempotence of saving/loading
153   #   Have to replace Exodus file, which is creating uninterpolated edges
154   test:
155     suffix: 0
156     TODO: broken
157     requires: exodusii
158     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
159     args: -format hdf5_petsc -compare
160   test:
161     suffix: 1
162     TODO: broken
163     requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
164     nsize: 2
165     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
166     args: -petscpartitioner_type parmetis
167     args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail
168   testset:
169     requires: exodusii
170     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
171     args: -petscpartitioner_type simple
172     args: -dm_view ascii::ascii_info_detail
173     args: -new_dm_view ascii::ascii_info_detail
174     test:
175       suffix: 2
176       nsize: {{1 2 4 8}separate output}
177       args: -format {{default hdf5_petsc}separate output}
178       args: -interpolate {{0 1}separate output}
179     test:
180       suffix: 2a
181       nsize: {{1 2 4 8}separate output}
182       args: -format {{hdf5_xdmf hdf5_viz}separate output}
183   test:
184     suffix: 3
185     requires: exodusii
186     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
187 
188   # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
189   testset:
190     suffix: 4
191     requires: !complex
192     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
193     args: -distribute 0 -second_write_read -compare
194     test:
195       suffix: hdf5_petsc
196       nsize: {{1 2}}
197       args: -format hdf5_petsc -compare_labels
198     test:
199       suffix: hdf5_xdmf
200       nsize: {{1 3 8}}
201       args: -format hdf5_xdmf
202 
203   # Use low level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
204   # Output must be the same as ex55_2_nsize-2_format-hdf5_petsc_interpolate-0.out
205   test:
206     suffix: 5
207     requires: exodusii
208     nsize: 2
209     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
210     args: -petscpartitioner_type simple
211     args: -dm_view ascii::ascii_info_detail
212     args: -new_dm_view ascii::ascii_info_detail
213     args: -format hdf5_petsc -use_low_level_functions
214 
215   testset:
216     suffix: 6
217     requires: hdf5 !complex datafilespath
218     nsize: {{1 3}}
219     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
220     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
221     args: -format hdf5_petsc -second_write_read -compare -compare_labels
222     args: -interpolate {{0 1}} -distribute {{0 1}} -petscpartitioner_type simple
223 
224   testset:
225     # the same data and settings as dm_impls_plex_tests-ex18_9%
226     suffix: 9
227     requires: hdf5 !complex datafilespath
228     nsize: {{1 2 4}}
229     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
230     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
231     args: -format hdf5_xdmf -second_write_read -compare
232     test:
233       suffix: hdf5_seqload
234       args: -distribute -petscpartitioner_type simple
235       args: -interpolate {{0 1}}
236       args: -dm_plex_hdf5_force_sequential
237     test:
238       suffix: hdf5_seqload_metis
239       requires: parmetis
240       args: -distribute -petscpartitioner_type parmetis
241       args: -interpolate 1
242       args: -dm_plex_hdf5_force_sequential
243     test:
244       suffix: hdf5
245       args: -interpolate 1 -petscpartitioner_type simple
246     test:
247       suffix: hdf5_repart
248       requires: parmetis
249       args: -distribute -petscpartitioner_type parmetis
250       args: -interpolate 1
251     test:
252       TODO: Parallel partitioning of uninterpolated meshes not supported
253       suffix: hdf5_repart_ppu
254       requires: parmetis
255       args: -distribute -petscpartitioner_type parmetis
256       args: -interpolate 0
257 
258   # reproduce PetscSFView() crash - fixed, left as regression test
259   test:
260     suffix: new_dm_view
261     requires: exodusii
262     nsize: 2
263     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
264 
265   # test backward compatibility of petsc_hdf5 format
266   testset:
267     suffix: 10-v3.16.0-v1.0.0
268     requires: hdf5 !complex datafilespath
269     args: -dm_plex_check_all -compare -compare_labels
270     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0}} -use_low_level_functions {{0 1}}
271     test:
272       suffix: a
273       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
274     test:
275       suffix: b
276       TODO: broken
277       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
278     test:
279       suffix: c
280       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
281     test:
282       suffix: d
283       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
284     test:
285       suffix: e
286       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
287     test:
288       suffix: f
289       args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5
290 
291 TEST*/
292