xref: /petsc/src/dm/impls/plex/tests/ex1.c (revision 4ffacfe27a72f4cdf51b68a3bbb6aed96040fb2f)
1 static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";
2 
3 #include <petscdmplex.h>
4 #include <petscdmplextransform.h>
5 #include <petscsf.h>
6 
/* Log stage identifiers, used to index AppCtx.stages */
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

/* Runtime options for this test, filled in by ProcessOptions() */
typedef struct {
  PetscLogEvent createMeshEvent;                 /* Event logged around the whole of CreateMesh() */
  PetscLogStage stages[4];                       /* One stage per mesh phase, indexed by the STAGE_* enum above */
  /* Domain and mesh definition */
  PetscInt      dim;                             /* The topological mesh dimension */
  PetscInt      overlap;                         /* The cell overlap to use during partitioning */
  PetscBool     testp4est[2];                    /* [0]: test p4est conversion of a sequential Plex; [1]: of a parallel Plex */
  PetscBool     redistribute;                    /* Test DMPlexDistribute() on an already distributed mesh */
  PetscBool     final_ref;                       /* Run refinement at the end */
  PetscBool     final_diagnostics;               /* Run diagnostics on the final mesh */
} AppCtx;
20 
/*
  ProcessOptions - Fill the AppCtx from the command line and register the
  log event/stages used by CreateMesh().

  Collective

  Input Parameter:
. comm - The communicator used to query the options database

  Output Parameter:
. options - The filled-in application context
*/
PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  /* Defaults; each may be overridden by the matching option below */
  options->dim               = 2;
  options->overlap           = 0;
  options->testp4est[0]      = PETSC_FALSE;
  options->testp4est[1]      = PETSC_FALSE;
  options->redistribute      = PETSC_FALSE;
  options->final_ref         = PETSC_FALSE;
  options->final_diagnostics = PETSC_TRUE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  /* -dim is restricted to [1,3] and -overlap to >= 0 by the Range/Bounded variants */
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL,1,3));
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL,0));
  PetscCall(PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL));
  PetscCall(PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL));
  PetscCall(PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL));
  PetscCall(PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL));
  PetscCall(PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL));
  PetscOptionsEnd();

  /* Event and stages consumed by CreateMesh(); visible in -log_view output */
  PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
  PetscCall(PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]));
  PetscFunctionReturn(0);
}
49 
50 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
51 {
52   PetscInt    dim           = user->dim;
53   PetscBool   testp4est_seq = user->testp4est[0];
54   PetscBool   testp4est_par = user->testp4est[1];
55   PetscMPIInt rank, size;
56 
57   PetscFunctionBegin;
58   PetscCall(PetscLogEventBegin(user->createMeshEvent,0,0,0,0));
59   PetscCallMPI(MPI_Comm_rank(comm, &rank));
60   PetscCallMPI(MPI_Comm_size(comm, &size));
61   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
62   PetscCall(DMCreate(comm, dm));
63   PetscCall(DMSetType(*dm, DMPLEX));
64   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
65   PetscCall(DMSetFromOptions(*dm));
66   PetscCall(DMLocalizeCoordinates(*dm));
67 
68   PetscCall(DMViewFromOptions(*dm,NULL,"-init_dm_view"));
69   PetscCall(DMGetDimension(*dm, &dim));
70 
71   if (testp4est_seq) {
72 #if defined(PETSC_HAVE_P4EST)
73     DM dmConv = NULL;
74 
75     PetscCall(DMPlexCheck(*dm));
76     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
77     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
78     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
79     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
80     if (dmConv) {
81       PetscCall(DMDestroy(dm));
82       *dm  = dmConv;
83     }
84     PetscCall(DMViewFromOptions(*dm,NULL,"-initref_dm_view"));
85     PetscCall(DMPlexCheck(*dm));
86 
87     /* For topologically periodic meshes, we first localize coordinates,
88        and then remove any information related with the
89        automatic computation of localized vertices.
90        This way, refinement operations and conversions to p4est
91        will preserve the shape of the domain in physical space */
92     PetscCall(DMSetPeriodicity(*dm, NULL, NULL));
93 
94     PetscCall(DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv));
95     if (dmConv) {
96       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_1_"));
97       PetscCall(DMSetFromOptions(dmConv));
98       PetscCall(DMDestroy(dm));
99       *dm  = dmConv;
100     }
101     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_1_"));
102     PetscCall(DMSetUp(*dm));
103     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
104     PetscCall(DMConvert(*dm,DMPLEX,&dmConv));
105     if (dmConv) {
106       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_2_"));
107       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
108       PetscCall(DMSetFromOptions(dmConv));
109       PetscCall(DMDestroy(dm));
110       *dm  = dmConv;
111     }
112     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_2_"));
113     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
114     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
115 #else
116     SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
117 #endif
118   }
119 
120   PetscCall(PetscLogStagePop());
121   if (!testp4est_seq) {
122     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
123     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view"));
124     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "dist_"));
125     PetscCall(DMSetFromOptions(*dm));
126     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
127     PetscCall(PetscLogStagePop());
128     PetscCall(DMViewFromOptions(*dm, NULL, "-distributed_dm_view"));
129   }
130   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
131   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "ref_"));
132   PetscCall(DMSetFromOptions(*dm));
133   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
134   PetscCall(PetscLogStagePop());
135 
136   if (testp4est_par) {
137 #if defined(PETSC_HAVE_P4EST)
138     DM dmConv = NULL;
139 
140     PetscCall(DMPlexCheck(*dm));
141     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
142     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
143     PetscCall(DMPlexSetTransformType(*dm, DMPLEXREFINETOBOX));
144     PetscCall(DMRefine(*dm, PETSC_COMM_WORLD, &dmConv));
145     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
146     if (dmConv) {
147       PetscCall(DMDestroy(dm));
148       *dm  = dmConv;
149     }
150     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_tobox_view"));
151     PetscCall(DMPlexCheck(*dm));
152 
153     PetscCall(DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv));
154     if (dmConv) {
155       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_1_"));
156       PetscCall(DMSetFromOptions(dmConv));
157       PetscCall(DMDestroy(dm));
158       *dm  = dmConv;
159     }
160     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_1_"));
161     PetscCall(DMSetUp(*dm));
162     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
163     PetscCall(DMConvert(*dm, DMPLEX, &dmConv));
164     if (dmConv) {
165       PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_2_"));
166       PetscCall(DMPlexDistributeSetDefault(dmConv, PETSC_FALSE));
167       PetscCall(DMSetFromOptions(dmConv));
168       PetscCall(DMDestroy(dm));
169       *dm  = dmConv;
170     }
171     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_2_"));
172     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
173     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL));
174 #else
175     SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
176 #endif
177   }
178 
179   /* test redistribution of an already distributed mesh */
180   if (user->redistribute) {
181     DM       distributedMesh;
182     PetscSF  sf;
183     PetscInt nranks;
184 
185     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view"));
186     PetscCall(DMPlexDistribute(*dm, 0, NULL, &distributedMesh));
187     if (distributedMesh) {
188       PetscCall(DMGetPointSF(distributedMesh, &sf));
189       PetscCall(PetscSFSetUp(sf));
190       PetscCall(DMGetNeighbors(distributedMesh, &nranks, NULL));
191       PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm)));
192       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %" PetscInt_FMT "\n", nranks));
193       PetscCall(DMDestroy(dm));
194       *dm  = distributedMesh;
195     }
196     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_redist_view"));
197   }
198 
199   if (user->overlap) {
200     DM overlapMesh = NULL;
201 
202     /* Add the overlap to refined mesh */
203     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
204     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view"));
205     PetscCall(DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh));
206     if (overlapMesh) {
207       PetscInt overlap;
208       PetscCall(DMPlexGetOverlap(overlapMesh, &overlap));
209       PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %" PetscInt_FMT "\n", overlap));
210       PetscCall(DMDestroy(dm));
211       *dm = overlapMesh;
212     }
213     PetscCall(DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view"));
214     PetscCall(PetscLogStagePop());
215   }
216   if (user->final_ref) {
217     DM refinedMesh = NULL;
218 
219     PetscCall(DMPlexSetRefinementUniform(*dm, PETSC_TRUE));
220     PetscCall(DMRefine(*dm, comm, &refinedMesh));
221     if (refinedMesh) {
222       PetscCall(DMDestroy(dm));
223       *dm  = refinedMesh;
224     }
225   }
226 
227   PetscCall(PetscObjectSetName((PetscObject) *dm, "Generated Mesh"));
228   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
229   if (user->final_diagnostics) PetscCall(DMPlexCheck(*dm));
230   PetscCall(PetscLogEventEnd(user->createMeshEvent,0,0,0,0));
231   PetscFunctionReturn(0);
232 }
233 
234 int main(int argc, char **argv)
235 {
236   DM             dm;
237   AppCtx         user;
238 
239   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
240   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
241   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
242   PetscCall(DMDestroy(&dm));
243   PetscCall(PetscFinalize());
244   return 0;
245 }
246 
247 /*TEST
248 
249   # CTetGen 0-1
250   test:
251     suffix: 0
252     requires: ctetgen
253     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
254   test:
255     suffix: 1
256     requires: ctetgen
257     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail -info :~sys
258 
259   # 2D LaTex and ASCII output 2-9
260   test:
261     suffix: 2
262     requires: triangle
263     args: -dm_plex_interpolate 0 -dm_view ascii::ascii_latex
264   test:
265     suffix: 3
266     requires: triangle
267     args: -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
268   test:
269     suffix: 4
270     requires: triangle
271     nsize: 2
272     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
273   test:
274     suffix: 5
275     requires: triangle
276     nsize: 2
277     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
278   test:
279     suffix: 6
280     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_view ascii::ascii_info_detail
281   test:
282     suffix: 7
283     args: -dm_coord_space 0 -dm_plex_simplex 0 -ref_dm_refine 1 -dm_view ascii::ascii_info_detail
284   test:
285     suffix: 8
286     nsize: 2
287     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex
288 
289   # 1D ASCII output
290   testset:
291     args: -dm_coord_space 0 -dm_plex_dim 1 -dm_view ascii::ascii_info_detail -dm_plex_check_all
292     test:
293       suffix: 1d_0
294       args:
295     test:
296       suffix: 1d_1
297       args: -ref_dm_refine 2
298     test:
299       suffix: 1d_2
300       args: -dm_plex_box_faces 5 -dm_plex_box_bd periodic
301 
302   # Parallel refinement tests with overlap
303   test:
304     suffix: refine_overlap_1d
305     nsize: 2
306     args: -dm_plex_dim 1 -dim 1 -dm_plex_box_faces 4 -ref_dm_refine 1 -overlap {{0 1 2}separate output} -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info
307   test:
308     suffix: refine_overlap_2d
309     requires: triangle
310     nsize: {{2 8}separate output}
311     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info
312 
313   # Parallel extrusion tests
314   test:
315     suffix: spheresurface_extruded
316     nsize : 4
317     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 -dist_dm_distribute -petscpartitioner_type simple \
318           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical
319 
320   test:
321     suffix: spheresurface_extruded_symmetric
322     nsize : 4
323     args: -dm_coord_space 0 -dm_plex_shape sphere -dm_extrude 3 -dm_plex_transform_extrude_symmetric -dist_dm_distribute -petscpartitioner_type simple \
324           -dm_plex_check_all -dm_view ::ascii_info_detail -dm_plex_view_coord_system spherical
325 
326   # Parallel simple partitioner tests
327   test:
328     suffix: part_simple_0
329     requires: triangle
330     nsize: 2
331     args: -dm_coord_space 0 -dm_plex_interpolate 0 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
332   test:
333     suffix: part_simple_1
334     requires: triangle
335     nsize: 8
336     args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail
337 
338   # Parallel partitioner tests
339   test:
340     suffix: part_parmetis_0
341     requires: parmetis
342     nsize: 2
343     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
344   test:
345     suffix: part_ptscotch_0
346     requires: ptscotch
347     nsize: 2
348     args: -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
349   test:
350     suffix: part_ptscotch_1
351     requires: ptscotch
352     nsize: 8
353     args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1
354 
355   # CGNS reader tests 10-11 (need to find smaller test meshes)
356   test:
357     suffix: cgns_0
358     requires: cgns
359     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -dm_view
360 
361   # ExodusII reader tests
362   testset:
363     args: -dm_plex_boundary_label boundary -dm_plex_check_all -dm_view
364     test:
365       suffix: exo_0
366       requires: exodusii
367       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad.exo
368     test:
369       suffix: exo_1
370       requires: exodusii
371       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/sevenside-quad-15.exo
372     test:
373       suffix: exo_2
374       requires: exodusii
375       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/squaremotor-30.exo
376     test:
377       suffix: exo_3
378       requires: exodusii
379       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
380     test:
381       suffix: exo_4
382       requires: exodusii
383       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/simpleblock-100.exo
384 
385   # Gmsh mesh reader tests
386   testset:
387     args: -dm_coord_space 0 -dm_view
388 
389     test:
390       suffix: gmsh_0
391       requires: !single
392       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
393     test:
394       suffix: gmsh_1
395       requires: !single
396       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh
397     test:
398       suffix: gmsh_2
399       requires: !single
400       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh
401     test:
402       suffix: gmsh_3
403       nsize: 3
404       requires: !single
405       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -dist_dm_distribute -petscpartitioner_type simple
406     test:
407       suffix: gmsh_4
408       nsize: 3
409       requires: !single
410       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dist_dm_distribute -petscpartitioner_type simple
411     test:
412       suffix: gmsh_5
413       requires: !single
414       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh
415     # TODO: it seems the mesh is not a valid gmsh (inverted cell)
416     test:
417       suffix: gmsh_6
418       requires: !single
419       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -final_diagnostics 0
420     test:
421       suffix: gmsh_7
422       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
423     test:
424       suffix: gmsh_8
425       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -dm_plex_check_all
426   testset:
427     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all
428     test:
429       suffix: gmsh_9
430     test:
431       suffix: gmsh_9_periodic_0
432       args: -dm_plex_gmsh_periodic 0
433   testset:
434     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all
435     test:
436       suffix: gmsh_10
437     test:
438       suffix: gmsh_10_periodic_0
439       args: -dm_plex_gmsh_periodic 0
440   testset:
441     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all -ref_dm_refine 1
442     test:
443       suffix: gmsh_11
444     test:
445       suffix: gmsh_11_periodic_0
446       args: -dm_plex_gmsh_periodic 0
447   # TODO: it seems the mesh is not a valid gmsh (inverted cell)
448   test:
449     suffix: gmsh_12
450     nsize: 4
451     requires: !single mpiio
452     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
453   test:
454     suffix: gmsh_13_hybs2t
455     nsize: 4
456     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dist_dm_distribute -petscpartitioner_type simple -dm_view -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
457   test:
458     suffix: gmsh_14_ext
459     requires: !single
460     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all
461   test:
462     suffix: gmsh_14_ext_s2t
463     requires: !single
464     args: -dm_coord_space 0 -dm_extrude 2 -dm_plex_transform_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
465   test:
466     suffix: gmsh_15_hyb3d
467     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all
468   test:
469     suffix: gmsh_15_hyb3d_vtk
470     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -dm_plex_gmsh_hybrid -dm_plex_check_all
471   test:
472     suffix: gmsh_15_hyb3d_s2t
473     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
474   test:
475     suffix: gmsh_16_spheresurface
476     nsize : 4
477     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
478   test:
479     suffix: gmsh_16_spheresurface_s2t
480     nsize : 4
481     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
482   test:
483     suffix: gmsh_16_spheresurface_extruded
484     nsize : 4
485     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
486   test:
487     suffix: gmsh_16_spheresurface_extruded_s2t
488     nsize : 4
489     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple
490   test:
491     suffix: gmsh_17_hyb3d_interp_ascii
492     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -dm_plex_check_all
493   test:
494     suffix: exodus_17_hyb3d_interp_ascii
495     requires: exodusii
496     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -dm_plex_check_all
497 
498   # Legacy Gmsh v22/v40 ascii/binary reader tests
499   testset:
500     output_file: output/ex1_gmsh_3d_legacy.out
501     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all
502     test:
503       suffix: gmsh_3d_ascii_v22
504       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
505     test:
506       suffix: gmsh_3d_ascii_v40
507       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
508     test:
509       suffix: gmsh_3d_binary_v22
510       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
511     test:
512       suffix: gmsh_3d_binary_v40
513       requires: long64
514       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4
515 
516   # Gmsh v41 ascii/binary reader tests
517   testset: # 32bit mesh, sequential
518     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
519     output_file: output/ex1_gmsh_3d_32.out
520     test:
521       suffix: gmsh_3d_ascii_v41_32
522       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
523     test:
524       suffix: gmsh_3d_binary_v41_32
525       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
526     test:
527       suffix: gmsh_3d_binary_v41_32_mpiio
528       requires: defined(PETSC_HAVE_MPIIO)
529       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
530   test:
531     suffix: gmsh_quad_8node
532     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-qua-8node.msh \
533           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
534   test:
535     suffix: gmsh_hex_20node
536     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-hex-20node.msh \
537           -dm_view -dm_plex_check_all -dm_plex_gmsh_mark_vertices
538   testset:  # 32bit mesh, parallel
539     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
540     nsize: 2
541     output_file: output/ex1_gmsh_3d_32_np2.out
542     test:
543       suffix: gmsh_3d_ascii_v41_32_np2
544       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
545     test:
546       suffix: gmsh_3d_binary_v41_32_np2
547       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
548     test:
549       suffix: gmsh_3d_binary_v41_32_np2_mpiio
550       requires: defined(PETSC_HAVE_MPIIO)
551       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
552   testset: # 64bit mesh, sequential
553     args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
554     output_file: output/ex1_gmsh_3d_64.out
555     test:
556       suffix: gmsh_3d_ascii_v41_64
557       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
558     test:
559       suffix: gmsh_3d_binary_v41_64
560       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
561     test:
562       suffix: gmsh_3d_binary_v41_64_mpiio
563       requires: defined(PETSC_HAVE_MPIIO)
564       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
565   testset:  # 64bit mesh, parallel
566     args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all -dm_plex_gmsh_mark_vertices
567     nsize: 2
568     output_file: output/ex1_gmsh_3d_64_np2.out
569     test:
570       suffix: gmsh_3d_ascii_v41_64_np2
571       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
572     test:
573       suffix: gmsh_3d_binary_v41_64_np2
574       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
575     test:
576       suffix: gmsh_3d_binary_v41_64_np2_mpiio
577       requires: defined(PETSC_HAVE_MPIIO)
578       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
579 
580   # Fluent mesh reader tests
581   # TODO: Geometry checks fail
582   test:
583     suffix: fluent_0
584     requires: !complex
585     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dm_view -final_diagnostics 0
586   test:
587     suffix: fluent_1
588     nsize: 3
589     requires: !complex
590     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0
591   test:
592     suffix: fluent_2
593     requires: !complex
594     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -dm_view -final_diagnostics 0
595   test:
596     suffix: fluent_3
597     requires: !complex
598     TODO: Fails on non-linux: fseek(), fileno() ? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
599     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -dm_view -final_diagnostics 0
600 
601   # Med mesh reader tests, including parallel file reads
602   test:
603     suffix: med_0
604     requires: med
605     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dm_view
606   test:
607     suffix: med_1
608     requires: med
609     nsize: 3
610     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dist_dm_distribute -petscpartitioner_type simple -dm_view
611   test:
612     suffix: med_2
613     requires: med
614     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dm_view
615   test:
616     suffix: med_3
617     requires: med
618     TODO: MED
619     nsize: 3
620     args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dist_dm_distribute -petscpartitioner_type simple -dm_view
621 
622   # Test shape quality
623   test:
624     suffix: test_shape
625     requires: ctetgen
626     args: -dm_plex_dim 3 -dim 3 -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape
627 
628   # Test simplex to tensor conversion
629   test:
630     suffix: s2t2
631     requires: triangle
632     args: -dm_coord_space 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail
633 
634   test:
635     suffix: s2t3
636     requires: ctetgen
637     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail
638 
639   # Test cylinder
640   testset:
641     args: -dm_plex_shape cylinder -dm_plex_check_all -dm_view
642     test:
643       suffix: cylinder
644       args: -ref_dm_refine 1
645     test:
646       suffix: cylinder_per
647       args: -dm_plex_cylinder_bd periodic -ref_dm_refine 1 -ref_dm_refine_remap 0
648     test:
649       suffix: cylinder_wedge
650       args: -dm_coord_space 0 -dm_plex_interpolate 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
651     test:
652       suffix: cylinder_wedge_int
653       output_file: output/ex1_cylinder_wedge.out
654       args: -dm_coord_space 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk:
655 
656   test:
657     suffix: box_2d
658     args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view
659 
660   test:
661     suffix: box_2d_per
662     args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -ref_dm_refine 2 -dm_plex_check_all -dm_view
663 
664   test:
665     suffix: box_2d_per_unint
666     args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_interpolate 0 -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view ::ascii_info_detail
667 
668   test:
669     suffix: box_3d
670     args: -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -ref_dm_refine 3 -dm_plex_check_all -dm_view
671 
672   test:
673     requires: triangle
674     suffix: box_wedge
675     args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: -dm_plex_check_all
676 
677   testset:
678     requires: triangle
679     args: -dm_coord_space 0 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_plex_box_faces 2,3,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox
680     test:
681       suffix: box_wedge_s2t
682     test:
683       nsize: 3
684       args: -dist_dm_distribute -petscpartitioner_type simple
685       suffix: box_wedge_s2t_parallel
686 
687   # Test GLVis output
688   testset:
689     args: -dm_coord_space 0 -dm_plex_interpolate 0
690     test:
691       suffix: glvis_2d_tet
692       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
693     test:
694       suffix: glvis_2d_tet_per
695       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
696     test:
697       suffix: glvis_3d_tet
698       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
699   testset:
700     args: -dm_coord_space 0
701     test:
702       suffix: glvis_2d_tet_per_mfem
703       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
704     test:
705       suffix: glvis_2d_quad
706       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_view glvis:
707     test:
708       suffix: glvis_2d_quad_per
709       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
710     test:
711       suffix: glvis_2d_quad_per_mfem
712       args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
713     test:
714       suffix: glvis_3d_tet_per
715       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
716     test:
717       suffix: glvis_3d_tet_per_mfem
718       TODO: broken
719       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis:
720     test:
721       suffix: glvis_3d_hex
722       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_view glvis:
723     test:
724       suffix: glvis_3d_hex_per
725       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
726     test:
727       suffix: glvis_3d_hex_per_mfem
728       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
729     test:
730       suffix: glvis_2d_hyb
731       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
732     test:
733       suffix: glvis_3d_hyb
734       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
735     test:
736       suffix: glvis_3d_hyb_s2t
737       args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -ref_dm_refine 1 -ref_dm_plex_transform_type refine_tobox -dm_plex_check_all
738 
739   # Test P4EST
740   testset:
741     requires: p4est
742     args: -dm_coord_space 0 -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
743     test:
744       suffix: p4est_periodic
745       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
746     test:
747       suffix: p4est_periodic_3d
748       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
749     test:
750       suffix: p4est_gmsh_periodic
751       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
752     test:
753       suffix: p4est_gmsh_surface
754       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
755     test:
756       suffix: p4est_gmsh_surface_parallel
757       nsize: 2
758       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
759     test:
760       suffix: p4est_hyb_2d
761       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
762     test:
763       suffix: p4est_hyb_3d
764       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
765     test:
766       requires: ctetgen
767       suffix: p4est_s2t_bugfaces_3d
768       args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1
769     test:
770       suffix: p4est_bug_overlapsf
771       nsize: 3
772       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
773     test:
774       suffix: p4est_redistribute
775       nsize: 3
776       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_alg {{mat graph overlap}} -dm_view ::load_balance
777     test:
778       suffix: p4est_gmsh_s2t_3d
779       args: -conv_seq_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
780     test:
781       suffix: p4est_gmsh_s2t_3d_hash
782       args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
783     test:
784       requires: long_runtime
785       suffix: p4est_gmsh_periodic_3d
786       args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
787 
788   testset:
789     requires: p4est
790     nsize: 6
791     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0 -dist_dm_distribute
792     test:
793       TODO: interface cones do not conform
794       suffix: p4est_par_periodic
795       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
796     test:
797       TODO: interface cones do not conform
798       suffix: p4est_par_periodic_3d
799       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,periodic -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
800     test:
801       TODO: interface cones do not conform
802       suffix: p4est_par_gmsh_periodic
803       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
804     test:
805       suffix: p4est_par_gmsh_surface
806       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
807     test:
808       suffix: p4est_par_gmsh_s2t_3d
809       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
810     test:
811       TODO: interface cones do not conform
812       suffix: p4est_par_gmsh_s2t_3d_hash
813       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
814     test:
815       requires: long_runtime
816       suffix: p4est_par_gmsh_periodic_3d
817       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
818 
819   testset:
820     requires: p4est
821     nsize: 6
822     args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -dist_dm_distribute -petscpartitioner_type simple
823     test:
824       suffix: p4est_par_ovl_periodic
825       args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
826     #TODO Mesh cell 201 is inverted, vol = 0. (FVM Volume. Is it correct? -> Diagnostics disabled)
827     test:
828       suffix: p4est_par_ovl_periodic_3d
829       args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
830     test:
831       suffix: p4est_par_ovl_gmsh_periodic
832       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
833     test:
834       suffix: p4est_par_ovl_gmsh_surface
835       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
836     test:
837       suffix: p4est_par_ovl_gmsh_s2t_3d
838       args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
839     test:
840       suffix: p4est_par_ovl_gmsh_s2t_3d_hash
841       args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
842     test:
843       requires: long_runtime
844       suffix: p4est_par_ovl_gmsh_periodic_3d
845       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
846     test:
847       suffix: p4est_par_ovl_hyb_2d
848       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
849     test:
850       suffix: p4est_par_ovl_hyb_3d
851       args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
852 
853   test:
854     TODO: broken
855     requires: p4est
856     nsize: 2
857     suffix: p4est_bug_labels_noovl
858     args: -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -dist_dm_distribute -petscpartitioner_type simple -dm_forest_print_label_error
859 
860   test:
861     requires: p4est
862     nsize: 2
863     suffix: p4est_bug_distribute_overlap
864     args: -dm_coord_space 0 -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
865     args: -dm_post_overlap_view
866 
867   test:
868     suffix: ref_alfeld2d_0
869     requires: triangle
870     args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
871   test:
872     suffix: ref_alfeld3d_0
873     requires: ctetgen
874     args: -dm_plex_dim 3 -dm_plex_box_faces 5,1,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_transform_type refine_alfeld -final_diagnostics
875 
876   # Boundary layer refiners
877   test:
878     suffix: ref_bl_1
879     args: -dm_plex_dim 1 -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 2 -final_diagnostics -ref_dm_plex_transform_bl_splits 3
880   test:
881     suffix: ref_bl_2_tri
882     requires: triangle
883     args: -dm_coord_space 0 -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
884   test:
885     suffix: ref_bl_3_quad
886     args: -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -dm_extrude 3 -final_diagnostics -ref_dm_plex_transform_bl_splits 4
887   test:
888     suffix: ref_bl_spheresurface_extruded
    nsize: 4
890     args: -dm_coord_space 0 -dm_extrude 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 2
891   test:
892     suffix: ref_bl_3d_hyb
    nsize: 4
894     args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_transform_type refine_boundary_layer -ref_dm_plex_transform_bl_splits 4 -ref_dm_plex_transform_bl_height_factor 3.1
895 
896   testset:
897     args: -dm_plex_shape sphere -dm_plex_check_all -dm_view
898     test:
899       suffix: sphere_0
900       args:
901     test:
902       suffix: sphere_1
903       args: -ref_dm_refine 2
904     test:
905       suffix: sphere_2
906       args: -dm_plex_simplex 0
907     test:
908       suffix: sphere_3
909       args: -dm_plex_simplex 0 -ref_dm_refine 2
910 
911   test:
912     suffix: ball_0
913     requires: ctetgen
914     args: -dm_plex_dim 3 -dm_plex_shape ball -dm_plex_check_all -dm_view
915 
916   test:
917     suffix: ball_1
918     requires: ctetgen
919     args: -dm_plex_dim 3 -dm_plex_shape ball -bd_dm_refine 2 -dm_plex_check_all -dm_view
920 
921   test:
922     suffix: schwarz_p_extrude
923     args: -dm_plex_shape schwarz_p -dm_plex_tps_extent 1,1,1 -dm_plex_tps_layers 1 -dm_plex_tps_thickness .2 -dm_view
924 TEST*/
925