xref: /petsc/src/dm/impls/plex/tests/ex1.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
/* Help text printed for -help; summarizes what this example exercises */
static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscdmplextransform.h>
5 #include <petscsf.h>
6 
/* Profiling stage indices; used to index AppCtx.stages[] */
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

/* Application context holding all command-line configurable test settings */
typedef struct {
  PetscLogEvent createMeshEvent;                 /* Event covering the whole CreateMesh() pipeline */
  PetscLogStage stages[4];                       /* One log stage per phase, indexed by the STAGE_* enum above */
  /* Domain and mesh definition */
  PetscInt      dim;                             /* The topological mesh dimension */
  PetscInt      overlap;                         /* The cell overlap to use during partitioning */
  PetscBool     testp4est[2];                    /* [0]: test p4est with a sequential base DM; [1]: with a parallel base DM */
  PetscBool     redistribute;                    /* Test redistribution of an already distributed mesh */
  PetscBool     final_ref;                       /* Run refinement at the end */
  PetscBool     final_diagnostics;               /* Run diagnostics on the final mesh */
} AppCtx;
20 
/* ProcessOptions - Set defaults and read command-line options into the application context.

   Collective on comm.

   Input Parameter:
.  comm - The communicator to read options from

   Output Parameter:
.  options - The AppCtx to populate; every field gets a default before the
             options database is consulted

   Also registers the "CreateMesh" log event and the four per-phase log stages. */
PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  /* Defaults, overwritten below by any option the user supplies */
  options->dim               = 2;
  options->overlap           = 0;
  options->testp4est[0]      = PETSC_FALSE;
  options->testp4est[1]      = PETSC_FALSE;
  options->redistribute      = PETSC_FALSE;
  options->final_ref         = PETSC_FALSE;
  options->final_diagnostics = PETSC_TRUE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  /* -dim is restricted to [1,3]; -overlap must be >= 0 */
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL,1,3));
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL,0));
  PetscCall(PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL));
  PetscCall(PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL));
  PetscCall(PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL));
  PetscCall(PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL));
  PetscCall(PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL));
  PetscOptionsEnd();

  /* Register profiling objects; stage indices match the STAGE_* enum */
  PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
  PetscCall(PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]));
  PetscFunctionReturn(0);
}
49 
50 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
51 {
52   PetscInt    dim           = user->dim;
53   PetscBool   testp4est_seq = user->testp4est[0];
54   PetscBool   testp4est_par = user->testp4est[1];
55   PetscMPIInt rank, size;
56 
57   PetscFunctionBegin;
58   PetscCall(PetscLogEventBegin(user->createMeshEvent,0,0,0,0));
59   PetscCallMPI(MPI_Comm_rank(comm, &rank));
60   PetscCallMPI(MPI_Comm_size(comm, &size));
61   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
62   PetscCall(DMCreate(comm, dm));
63   PetscCall(DMSetType(*dm, DMPLEX));
64   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
65   PetscCall(DMSetFromOptions(*dm));
66   PetscCall(DMLocalizeCoordinates(*dm));
67 
68   PetscCall(DMViewFromOptions(*dm,NULL,"-init_dm_view"));
69   PetscCall(DMGetDimension(*dm, &dim));
70 
71   if (testp4est_seq) {
72 #if defined(PETSC_HAVE_P4EST)
73     DM dmConv = NULL;
74 
75     PetscCall(DMPlexCheck(*dm));
76     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
77     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
78     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
79     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
80     if (dmConv) {
81       PetscCall(DMDestroy(dm));
82       *dm  = dmConv;
83     }
84     PetscCall(DMViewFromOptions(*dm,NULL,"-initref_dm_view"));
85     PetscCall(DMPlexCheck(*dm));
86 
87     /* For topologically periodic meshes, we first localize coordinates,
88        and then remove any information related with the
89        automatic computation of localized vertices.
90        This way, refinement operations and conversions to p4est
91        will preserve the shape of the domain in physical space */
92     PetscCall(DMSetPeriodicity(*dm, NULL, NULL, NULL));
93 
94     PetscCall(DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv));
95     if (dmConv) {
96       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_1_"));
97       PetscCall(DMSetFromOptions(dmConv));
98       PetscCall(DMDestroy(dm));
99       *dm  = dmConv;
100     }
101     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_1_"));
102     PetscCall(DMSetUp(*dm));
103     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
104     PetscCall(DMConvert(*dm,DMPLEX,&dmConv));
105     if (dmConv) {
106       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_2_"));
107       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
108       PetscCall(DMSetFromOptions(dmConv));
109       PetscCall(DMDestroy(dm));
110       *dm  = dmConv;
111     }
112     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_2_"));
113     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
114     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
115 #else
116     SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
117 #endif
118   }
119 
120   PetscCall(PetscLogStagePop());
121   if (!testp4est_seq) {
122     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
123     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view"));
124     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "dist_"));
125     PetscCall(DMSetFromOptions(*dm));
126     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
127     PetscCall(PetscLogStagePop());
128     PetscCall(DMViewFromOptions(*dm, NULL, "-distributed_dm_view"));
129   }
130   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
131   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "ref_"));
132   PetscCall(DMSetFromOptions(*dm));
133   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
134   PetscCall(PetscLogStagePop());
135 
136   if (testp4est_par) {
137 #if defined(PETSC_HAVE_P4EST)
138     DM dmConv = NULL;
139 
140     PetscCall(DMPlexCheck(*dm));
141     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
142     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
143     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
144     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
145     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
146     if (dmConv) {
147       PetscCall(DMDestroy(dm));
148       *dm  = dmConv;
149     }
150     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
151     PetscCall(DMPlexCheck(*dm));
152 
153     PetscCall(DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv));
154     if (dmConv) {
155       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_1_"));
156       PetscCall(DMSetFromOptions(dmConv));
157       PetscCall(DMDestroy(dm));
158       *dm  = dmConv;
159     }
160     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_1_"));
161     PetscCall(DMSetUp(*dm));
162     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
163     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
164     if (dmConv) {
165       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_2_"));
166       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
167       PetscCall(DMSetFromOptions(dmConv));
168       PetscCall(DMDestroy(dm));
169       *dm  = dmConv;
170     }
171     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_2_"));
172     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
173     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
174 #else
175     SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
176 #endif
177   }
178 
179   /* test redistribution of an already distributed mesh */
180   if (user->redistribute) {
181     DM       distributedMesh;
182     PetscSF  sf;
183     PetscInt nranks;
184 
185     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view"));
186     PetscCall(DMPlexDistribute(*dm, 0, NULL, &distributedMesh));
187     if (distributedMesh) {
188       PetscCall(DMGetPointSF(distributedMesh, &sf));
189       PetscCall(PetscSFSetUp(sf));
190       PetscCall(DMGetNeighbors(distributedMesh, &nranks, NULL));
191       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm)));
192       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %" PetscInt_FMT "\n", nranks));
193       PetscCall(DMDestroy(dm));
194       *dm  = distributedMesh;
195     }
196     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_redist_view"));
197   }
198 
199   if (user->overlap) {
200     DM overlapMesh = NULL;
201 
202     /* Add the overlap to refined mesh */
203     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
204     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view"));
205     PetscCall(DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh));
206     if (overlapMesh) {
207       PetscInt overlap;
208       PetscCall(DMPlexGetOverlap(overlapMesh, &overlap));
209       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %" PetscInt_FMT "\n", overlap));
210       PetscCall(DMDestroy(dm));
211       *dm = overlapMesh;
212     }
213     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view"));
214     PetscCall(PetscLogStagePop());
215   }
216   if (user->final_ref) {
217     DM refinedMesh = NULL;
218 
219     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
220     PetscCall(DMRefine(*dm, comm, &refinedMesh));
221     if (refinedMesh) {
222       PetscCall(DMDestroy(dm));
223       *dm  = refinedMesh;
224     }
225   }
226 
227   PetscCall(PetscObjectSetName((PetscObject) *dm, "Generated Mesh"));
228   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
229   if (user->final_diagnostics) PetscCall(DMPlexCheck(*dm));
230   PetscCall(PetscLogEventEnd(user->createMeshEvent,0,0,0,0));
231   PetscFunctionReturn(0);
232 }
233 
234 int main(int argc, char **argv)
235 {
236   DM             dm;
237   AppCtx         user;
238 
239   PetscFunctionBeginUser;
240   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
241   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
242   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
243   PetscCall(DMDestroy(&dm));
244   PetscCall(PetscFinalize());
245   return 0;
246 }
247 
248 /*TEST
249 
250   # CTetGen 0-1
251   test:
252     suffix: 0
253     requires: ctetgen
254     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
255   test:
256     suffix: 1
257     requires: ctetgen
258     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail -info :~sys
259 
260   # 2D LaTex and ASCII output 2-9
261   test:
262     suffix: 2
263     requires: triangle
264     args: -dm_plex_interpolate 0 -dm_view ascii::ascii_latex
265   test:
266     suffix: 3
267     requires: triangle
268     args: -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
269   test:
270     suffix: 4
271     requires: triangle
272     nsize: 2
273     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
274   test:
275     suffix: 5
276     requires: triangle
277     nsize: 2
278     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
279   test:
280     suffix: 6
281     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_view ascii::ascii_info_detail
282   test:
283     suffix: 7
284     args: -dm_coord_space 0 -dm_plex_simplex 0 -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
285   test:
286     suffix: 8
287     nsize: 2
288     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
289 
290   # 1D ASCII output
291   testset:
292     args: -dm_coord_space 0 -dm_plex_dim 1 -dm_view ascii::ascii_info_detail -dm_plex_check_all
293     test:
294       suffix: 1d_0
295       args:
296     test:
297       suffix: 1d_1
298       args: -ref_dm_refine 2
299     test:
300       suffix: 1d_2
301       args: -dm_plex_box_faces 5 -dm_plex_box_bd periodic
302 
303   # Parallel refinement tests with overlap
304   test:
305     suffix: refine_overlap_1d
306     nsize: 2
307     args: -dm_plex_dim 1 -dim 1 -dm_plex_box_faces 4 -ref_dm_refine 1 -overlap {{0 1 2}separate output} -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info
308   test:
309     suffix: refine_overlap_2d
310     requires: triangle
311     nsize: {{2 8}separate output}
312     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info
313 
314   # Parallel extrusion tests
315   test:
316     suffix: spheresurface_extruded
317     nsize : 4
318     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 -dist_dm_distribute -petscpartitioner_type simple \
319           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical
320 
321   test:
322     suffix: spheresurface_extruded_symmetric
323     nsize : 4
324     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_symmetric -dist_dm_distribute -petscpartitioner_type simple \
325           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical
326 
327   # Parallel simple partitioner tests
328   test:
329     suffix: part_simple_0
330     requires: triangle
331     nsize: 2
332     args: -dm_coord_space 0 -dm_plex_interpolate 0 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
333   test:
334     suffix: part_simple_1
335     requires: triangle
336     nsize: 8
337     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
338 
339   # Parallel partitioner tests
340   test:
341     suffix: part_parmetis_0
342     requires: parmetis
343     nsize: 2
344     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
345   test:
346     suffix: part_ptscotch_0
347     requires: ptscotch
348     nsize: 2
349     args: -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
350   test:
351     suffix: part_ptscotch_1
352     requires: ptscotch
353     nsize: 8
354     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1
355 
356   # CGNS reader tests 10-11 (need to find smaller test meshes)
357   test:
358     suffix: cgns_0
359     requires: cgns
360     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -dm_view
361 
362   # ExodusII reader tests
363   testset:
364     args: -dm_plex_boundary_label boundary -dm_plex_check_all -dm_view
365     test:
366       suffix: exo_0
367       requires: exodusii
368       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad.exo
369     test:
370       suffix: exo_1
371       requires: exodusii
372       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad-15.exo
373     test:
374       suffix: exo_2
375       requires: exodusii
376       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/squaremotor-30.exo
377     test:
378       suffix: exo_3
379       requires: exodusii
380       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
381     test:
382       suffix: exo_4
383       requires: exodusii
384       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/simpleblock-100.exo
385 
386   # Gmsh mesh reader tests
387   testset:
388     args: -dm_coord_space 0 -dm_view
389 
390     test:
391       suffix: gmsh_0
392       requires: !single
393       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
394     test:
395       suffix: gmsh_1
396       requires: !single
397       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh
398     test:
399       suffix: gmsh_2
400       requires: !single
401       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh
402     test:
403       suffix: gmsh_3
404       nsize: 3
405       requires: !single
406       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -dist_dm_distribute -petscpartitioner_type simple
407     test:
408       suffix: gmsh_4
409       nsize: 3
410       requires: !single
411       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dist_dm_distribute -petscpartitioner_type simple
412     test:
413       suffix: gmsh_5
414       requires: !single
415       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh
416     # TODO: it seems the mesh is not a valid gmsh (inverted cell)
417     test:
418       suffix: gmsh_6
419       requires: !single
420       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -final_diagnostics 0
421     test:
422       suffix: gmsh_7
423       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
424     test:
425       suffix: gmsh_8
426       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -dm_plex_check_all
427   testset:
428     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
429     test:
430       suffix: gmsh_9
431     test:
432       suffix: gmsh_9_periodic_0
433       args: -dm_plex_gmsh_periodic 0
434   testset:
435     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all
436     test:
437       suffix: gmsh_10
438     test:
439       suffix: gmsh_10_periodic_0
440       args: -dm_plex_gmsh_periodic 0
441   testset:
442     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all -ref_dm_refine 1
443     test:
444       suffix: gmsh_11
445     test:
446       suffix: gmsh_11_periodic_0
447       args: -dm_plex_gmsh_periodic 0
448   # TODO: it seems the mesh is not a valid gmsh (inverted cell)
449   test:
450     suffix: gmsh_12
451     nsize: 4
452     requires: !single mpiio
453     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
454   test:
455     suffix: gmsh_13_hybs2t
456     nsize: 4
457     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dist_dm_distribute -petscpartitioner_type simple -dm_view -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
458   test:
459     suffix: gmsh_14_ext
460     requires: !single
461     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all
462   test:
463     suffix: gmsh_14_ext_s2t
464     requires: !single
465     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
466   test:
467     suffix: gmsh_15_hyb3d
468     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all
469   test:
470     suffix: gmsh_15_hyb3d_vtk
471     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -dm_plex_gmsh_hybrid -dm_plex_check_all
472   test:
473     suffix: gmsh_15_hyb3d_s2t
474     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
475   test:
476     suffix: gmsh_16_spheresurface
477     nsize : 4
478     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
479   test:
480     suffix: gmsh_16_spheresurface_s2t
481     nsize : 4
482     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
483   test:
484     suffix: gmsh_16_spheresurface_extruded
485     nsize : 4
486     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
487   test:
488     suffix: gmsh_16_spheresurface_extruded_s2t
489     nsize : 4
490     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
491   test:
492     suffix: gmsh_17_hyb3d_interp_ascii
493     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -dm_plex_check_all
494   test:
495     suffix: exodus_17_hyb3d_interp_ascii
496     requires: exodusii
497     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -dm_plex_check_all
498 
499   # Legacy Gmsh v22/v40 ascii/binary reader tests
500   testset:
501     output_file: output/ex1_gmsh_3d_legacy.out
502     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all
503     test:
504       suffix: gmsh_3d_ascii_v22
505       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
506     test:
507       suffix: gmsh_3d_ascii_v40
508       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
509     test:
510       suffix: gmsh_3d_binary_v22
511       # Could not remake binary to remove extra face labeling
512       output_file: output/ex1_gmsh_3d_legacy_v22_bin.out
513       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
514     test:
515       suffix: gmsh_3d_binary_v40
516       requires: long64
517       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4
518 
519   # Gmsh v41 ascii/binary reader tests
520   testset: # 32bit mesh, sequential
521     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
522     output_file: output/ex1_gmsh_3d_32.out
523     test:
524       suffix: gmsh_3d_ascii_v41_32
525       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
526     test:
527       suffix: gmsh_3d_binary_v41_32
528       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
529     test:
530       suffix: gmsh_3d_binary_v41_32_mpiio
531       requires: defined(PETSC_HAVE_MPIIO)
532       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
533   test:
534     suffix: gmsh_quad_8node
535     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-qua-8node.msh \
536           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
537   test:
538     suffix: gmsh_hex_20node
539     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-hex-20node.msh \
540           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
541   testset:  # 32bit mesh, parallel
542     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
543     nsize: 2
544     output_file: output/ex1_gmsh_3d_32_np2.out
545     test:
546       suffix: gmsh_3d_ascii_v41_32_np2
547       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
548     test:
549       suffix: gmsh_3d_binary_v41_32_np2
550       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
551     test:
552       suffix: gmsh_3d_binary_v41_32_np2_mpiio
553       requires: defined(PETSC_HAVE_MPIIO)
554       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
555   testset: # 64bit mesh, sequential
556     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
557     output_file: output/ex1_gmsh_3d_64.out
558     test:
559       suffix: gmsh_3d_ascii_v41_64
560       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
561     test:
562       suffix: gmsh_3d_binary_v41_64
563       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
564     test:
565       suffix: gmsh_3d_binary_v41_64_mpiio
566       requires: defined(PETSC_HAVE_MPIIO)
567       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
568   testset:  # 64bit mesh, parallel
569     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
570     nsize: 2
571     output_file: output/ex1_gmsh_3d_64_np2.out
572     test:
573       suffix: gmsh_3d_ascii_v41_64_np2
574       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
575     test:
576       suffix: gmsh_3d_binary_v41_64_np2
577       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
578     test:
579       suffix: gmsh_3d_binary_v41_64_np2_mpiio
580       requires: defined(PETSC_HAVE_MPIIO)
581       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
582 
583   # Fluent mesh reader tests
584   # TODO: Geometry checks fail
585   test:
586     suffix: fluent_0
587     requires: !complex
588     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dm_view -final_diagnostics 0
589   test:
590     suffix: fluent_1
591     nsize: 3
592     requires: !complex
593     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
594   test:
595     suffix: fluent_2
596     requires: !complex
597     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -dm_view -final_diagnostics 0
598   test:
599     suffix: fluent_3
600     requires: !complex
601     TODO: Fails on non-linux: fseek(), fileno() ? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
602     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -dm_view -final_diagnostics 0
603 
604   # Med mesh reader tests, including parallel file reads
605   test:
606     suffix: med_0
607     requires: med
608     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dm_view
609   test:
610     suffix: med_1
611     requires: med
612     nsize: 3
613     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dist_dm_distribute -petscpartitioner_type simple -dm_view
614   test:
615     suffix: med_2
616     requires: med
617     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dm_view
618   test:
619     suffix: med_3
620     requires: med
621     TODO: MED
622     nsize: 3
623     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dist_dm_distribute -petscpartitioner_type simple -dm_view
624 
625   # Test shape quality
626   test:
627     suffix: test_shape
628     requires: ctetgen
629     args: -dm_plex_dim 3 -dim 3 -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape
630 
631   # Test simplex to tensor conversion
632   test:
633     suffix: s2t2
634     requires: triangle
635     args: -dm_coord_space 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail
636 
637   test:
638     suffix: s2t3
639     requires: ctetgen
640     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail
641 
642   # Test cylinder
643   testset:
644     args: -dm_plex_shape cylinder -dm_plex_check_all -dm_view
645     test:
646       suffix: cylinder
647       args: -ref_dm_refine 1
648     test:
649       suffix: cylinder_per
650       args: -dm_plex_cylinder_bd periodic -ref_dm_refine 1 -ref_dm_refine_remap 0
651     test:
652       suffix: cylinder_wedge
653       args: -dm_coord_space 0 -dm_plex_interpolate 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
654     test:
655       suffix: cylinder_wedge_int
656       output_file: output/ex1_cylinder_wedge.out
657       args: -dm_coord_space 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
658 
659   test:
660     suffix: box_2d
661     args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view
662 
663   test:
664     suffix: box_2d_per
665     args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -ref_dm_refine 2 -dm_plex_check_all -dm_view
666 
667   test:
668     suffix: box_2d_per_unint
669     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_interpolate 0 -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view ::ascii_info_detail
670 
671   test:
672     suffix: box_3d
673     args: -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -ref_dm_refine 3 -dm_plex_check_all -dm_view
674 
675   test:
676     requires: triangle
677     suffix: box_wedge
678     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: -dm_plex_check_all
679 
680   testset:
681     requires: triangle
682     args: -dm_coord_space 0 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_plex_box_faces 2,3,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
683     test:
684       suffix: box_wedge_s2t
685     test:
686       nsize: 3
687       args: -dist_dm_distribute -petscpartitioner_type simple
688       suffix: box_wedge_s2t_parallel
689 
690   # Test GLVis output
691   testset:
692     args: -dm_coord_space 0 -dm_plex_interpolate 0
693     test:
694       suffix: glvis_2d_tet
695       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
696     test:
697       suffix: glvis_2d_tet_per
698       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
699     test:
700       suffix: glvis_3d_tet
701       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
702   testset:
703     args: -dm_coord_space 0
704     test:
705       suffix: glvis_2d_tet_per_mfem
706       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
707     test:
708       suffix: glvis_2d_quad
709       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_view glvis:
710     test:
711       suffix: glvis_2d_quad_per
712       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
713     test:
714       suffix: glvis_2d_quad_per_shift
715       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_plex_box_lower -1,-1 -dm_plex_box_upper 1,1 -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
716     test:
717       suffix: glvis_2d_quad_per_mfem
718       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
719     test:
720       suffix: glvis_3d_tet_per
721       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
722     test:
723       suffix: glvis_3d_tet_per_mfem
724       TODO: broken
725       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
726     test:
727       suffix: glvis_3d_hex
728       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_view glvis:
729     test:
730       suffix: glvis_3d_hex_per
731       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
732     test:
733       suffix: glvis_3d_hex_per_mfem
734       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
735     test:
736       suffix: glvis_2d_hyb
737       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
738     test:
739       suffix: glvis_3d_hyb
740       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
741     test:
742       suffix: glvis_3d_hyb_s2t
743       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
744 
745   # Test P4EST
746   testset:
747     requires: p4est
748     args: -dm_coord_space 0 -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
749     test:
750       suffix: p4est_periodic
751       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
752     test:
753       suffix: p4est_periodic_3d
754       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
755     test:
756       suffix: p4est_gmsh_periodic
757       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
758     test:
759       suffix: p4est_gmsh_surface
760       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
761     test:
762       suffix: p4est_gmsh_surface_parallel
763       nsize: 2
764       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
765     test:
766       suffix: p4est_hyb_2d
767       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
768     test:
769       suffix: p4est_hyb_3d
770       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
771     test:
772       requires: ctetgen
773       suffix: p4est_s2t_bugfaces_3d
774       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1
775     test:
776       suffix: p4est_bug_overlapsf
777       nsize: 3
778       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
779     test:
780       suffix: p4est_redistribute
781       nsize: 3
782       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_view ::load_balance
783     test:
784       suffix: p4est_gmsh_s2t_3d
785       args: -conv_seq_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
786     test:
787       suffix: p4est_gmsh_s2t_3d_hash
788       args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
789     test:
790       requires: long_runtime
791       suffix: p4est_gmsh_periodic_3d
792       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
793 
794   testset:
795     requires: p4est
796     nsize: 6
797     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0 -dist_dm_distribute
798     test:
799       TODO: interface cones do not conform
800       suffix: p4est_par_periodic
801       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
802     test:
803       TODO: interface cones do not conform
804       suffix: p4est_par_periodic_3d
805       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,periodic -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
806     test:
807       TODO: interface cones do not conform
808       suffix: p4est_par_gmsh_periodic
809       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
810     test:
811       suffix: p4est_par_gmsh_surface
812       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
813     test:
814       suffix: p4est_par_gmsh_s2t_3d
815       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
816     test:
817       TODO: interface cones do not conform
818       suffix: p4est_par_gmsh_s2t_3d_hash
819       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
820     test:
821       requires: long_runtime
822       suffix: p4est_par_gmsh_periodic_3d
823       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
824 
825   testset:
826     requires: p4est
827     nsize: 6
828     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -dist_dm_distribute -petscpartitioner_type simple
829     test:
830       suffix: p4est_par_ovl_periodic
831       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
832     #TODO Mesh cell 201 is inverted, vol = 0. (FVM Volume. Is it correct? -> Diagnostics disabled)
833     test:
834       suffix: p4est_par_ovl_periodic_3d
835       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
836     test:
837       suffix: p4est_par_ovl_gmsh_periodic
838       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
839     test:
840       suffix: p4est_par_ovl_gmsh_surface
841       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
842     test:
843       suffix: p4est_par_ovl_gmsh_s2t_3d
844       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
845     test:
846       suffix: p4est_par_ovl_gmsh_s2t_3d_hash
847       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
848     test:
849       requires: long_runtime
850       suffix: p4est_par_ovl_gmsh_periodic_3d
851       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
852     test:
853       suffix: p4est_par_ovl_hyb_2d
854       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
855     test:
856       suffix: p4est_par_ovl_hyb_3d
857       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
858 
859   test:
860     TODO: broken
861     requires: p4est
862     nsize: 2
863     suffix: p4est_bug_labels_noovl
864     args: -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -dist_dm_distribute -petscpartitioner_type simple -dm_forest_print_label_error
865 
866   test:
867     requires: p4est
868     nsize: 2
869     suffix: p4est_bug_distribute_overlap
870     args: -dm_coord_space 0 -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
871     args: -dm_post_overlap_view
872 
873   test:
874     suffix: ref_alfeld2d_0
875     requires: triangle
876     args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
877   test:
878     suffix: ref_alfeld3d_0
879     requires: ctetgen
880     args: -dm_plex_dim 3 -dm_plex_box_faces 5,1,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
881 
882   # Boundary layer refiners
883   test:
884     suffix: ref_bl_1
885     args: -dm_plex_dim 1 -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 2 -final_diagnostics -ref_dm_plex_transform_bl_splits 3
886   test:
887     suffix: ref_bl_2_tri
888     requires: triangle
889     args: -dm_coord_space 0 -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
890   test:
891     suffix: ref_bl_3_quad
892     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
893   test:
894     suffix: ref_bl_spheresurface_extruded
895     nsize : 4
896     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 2
897   test:
898     suffix: ref_bl_3d_hyb
899     nsize : 4
900     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 4 -ref_dm_plex_transform_bl_height_factor 3.1
901 
902   testset:
903     args: -dm_plex_shape sphere -dm_plex_check_all -dm_view
904     test:
905       suffix: sphere_0
906       args:
907     test:
908       suffix: sphere_1
909       args: -ref_dm_refine 2
910     test:
911       suffix: sphere_2
912       args: -dm_plex_simplex 0
913     test:
914       suffix: sphere_3
915       args: -dm_plex_simplex 0 -ref_dm_refine 2
916 
917   test:
918     suffix: ball_0
919     requires: ctetgen
920     args: -dm_plex_dim 3 -dm_plex_shape ball -dm_plex_check_all -dm_view
921 
922   test:
923     suffix: ball_1
924     requires: ctetgen
925     args: -dm_plex_dim 3 -dm_plex_shape ball -bd_dm_refine 2 -dm_plex_check_all -dm_view
926 
927   test:
928     suffix: schwarz_p_extrude
929     args: -dm_plex_shape schwarz_p -dm_plex_tps_extent 1,1,1 -dm_plex_tps_layers 1 -dm_plex_tps_thickness .2 -dm_view
930 
931   test:
932     suffix: pyr_mixed_0
933     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/pyr_tet.msh -dm_plex_check_all -dm_view
934 TEST*/
935