1 static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n"; 2 3 #include <petscdmplex.h> 4 #include <petscsf.h> 5 6 enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP}; 7 8 typedef struct { 9 PetscLogEvent createMeshEvent; 10 PetscLogStage stages[4]; 11 /* Domain and mesh definition */ 12 PetscInt dim; /* The topological mesh dimension */ 13 PetscInt overlap; /* The cell overlap to use during partitioning */ 14 PetscBool testp4est[2]; 15 PetscBool redistribute; 16 PetscBool final_ref; /* Run refinement at the end */ 17 PetscBool final_diagnostics; /* Run diagnostics on the final mesh */ 18 } AppCtx; 19 20 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) 21 { 22 PetscErrorCode ierr; 23 24 PetscFunctionBegin; 25 options->dim = 2; 26 options->overlap = 0; 27 options->testp4est[0] = PETSC_FALSE; 28 options->testp4est[1] = PETSC_FALSE; 29 options->redistribute = PETSC_FALSE; 30 options->final_ref = PETSC_FALSE; 31 options->final_diagnostics = PETSC_TRUE; 32 33 ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr); 34 ierr = PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL,1,3);CHKERRQ(ierr); 35 ierr = PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL,0);CHKERRQ(ierr); 36 ierr = PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL);CHKERRQ(ierr); 37 ierr = PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL);CHKERRQ(ierr); 38 ierr = PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL);CHKERRQ(ierr); 39 ierr = PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, 
&options->final_ref, NULL);CHKERRQ(ierr); 40 ierr = PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL);CHKERRQ(ierr); 41 ierr = PetscOptionsEnd();CHKERRQ(ierr); 42 43 ierr = PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);CHKERRQ(ierr); 44 ierr = PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]);CHKERRQ(ierr); 45 ierr = PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr); 46 ierr = PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]);CHKERRQ(ierr); 47 ierr = PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]);CHKERRQ(ierr); 48 PetscFunctionReturn(0); 49 } 50 51 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) 52 { 53 PetscInt dim = user->dim; 54 PetscBool testp4est_seq = user->testp4est[0]; 55 PetscBool testp4est_par = user->testp4est[1]; 56 PetscMPIInt rank, size; 57 PetscBool periodic; 58 PetscErrorCode ierr; 59 60 PetscFunctionBegin; 61 ierr = PetscLogEventBegin(user->createMeshEvent,0,0,0,0);CHKERRQ(ierr); 62 ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr); 63 ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr); 64 ierr = PetscLogStagePush(user->stages[STAGE_LOAD]);CHKERRQ(ierr); 65 ierr = DMCreate(comm, dm);CHKERRQ(ierr); 66 ierr = DMSetType(*dm, DMPLEX);CHKERRQ(ierr); 67 ierr = DMSetFromOptions(*dm);CHKERRQ(ierr); 68 69 /* For topologically periodic meshes, we first localize coordinates, 70 and then remove any information related with the 71 automatic computation of localized vertices. 
72 This way, refinement operations and conversions to p4est 73 will preserve the shape of the domain in physical space */ 74 ierr = DMLocalizeCoordinates(*dm);CHKERRQ(ierr); 75 ierr = DMGetPeriodicity(*dm, &periodic, NULL, NULL, NULL);CHKERRQ(ierr); 76 if (periodic) {ierr = DMSetPeriodicity(*dm, PETSC_TRUE, NULL, NULL, NULL);CHKERRQ(ierr);} 77 78 ierr = DMViewFromOptions(*dm,NULL,"-init_dm_view");CHKERRQ(ierr); 79 ierr = DMGetDimension(*dm, &dim);CHKERRQ(ierr); 80 81 if (testp4est_seq) { 82 #if defined(PETSC_HAVE_P4EST) 83 DM dmConv = NULL; 84 85 ierr = DMPlexCheckSymmetry(*dm);CHKERRQ(ierr); 86 ierr = DMPlexCheckSkeleton(*dm, 0);CHKERRQ(ierr); 87 ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr); 88 ierr = DMPlexCheckGeometry(*dm);CHKERRQ(ierr); 89 ierr = DMPlexCheckPointSF(*dm);CHKERRQ(ierr); 90 ierr = DMPlexCheckInterfaceCones(*dm);CHKERRQ(ierr); 91 ierr = DMPlexSetRefinementUniform(*dm, PETSC_TRUE);CHKERRQ(ierr); 92 ierr = DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);CHKERRQ(ierr); 93 ierr = DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);CHKERRQ(ierr); 94 if (dmConv) { 95 ierr = DMDestroy(dm);CHKERRQ(ierr); 96 *dm = dmConv; 97 } 98 ierr = DMViewFromOptions(*dm,NULL,"-initref_dm_view");CHKERRQ(ierr); 99 ierr = DMPlexCheckSymmetry(*dm);CHKERRQ(ierr); 100 ierr = DMPlexCheckSkeleton(*dm, 0);CHKERRQ(ierr); 101 ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr); 102 ierr = DMPlexCheckGeometry(*dm);CHKERRQ(ierr); 103 ierr = DMPlexCheckPointSF(*dm);CHKERRQ(ierr); 104 ierr = DMPlexCheckInterfaceCones(*dm);CHKERRQ(ierr); 105 106 ierr = DMConvert(*dm,dim == 2 ? 
DMP4EST : DMP8EST,&dmConv);CHKERRQ(ierr); 107 if (dmConv) { 108 ierr = PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_1_");CHKERRQ(ierr); 109 ierr = DMSetFromOptions(dmConv);CHKERRQ(ierr); 110 ierr = DMDestroy(dm);CHKERRQ(ierr); 111 *dm = dmConv; 112 } 113 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_1_");CHKERRQ(ierr); 114 ierr = DMSetUp(*dm);CHKERRQ(ierr); 115 ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr); 116 ierr = DMConvert(*dm,DMPLEX,&dmConv);CHKERRQ(ierr); 117 if (dmConv) { 118 ierr = PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_2_");CHKERRQ(ierr); 119 ierr = DMSetFromOptions(dmConv);CHKERRQ(ierr); 120 ierr = DMDestroy(dm);CHKERRQ(ierr); 121 *dm = dmConv; 122 } 123 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_2_");CHKERRQ(ierr); 124 ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr); 125 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);CHKERRQ(ierr); 126 #else 127 SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est"); 128 #endif 129 } 130 131 ierr = PetscLogStagePop();CHKERRQ(ierr); 132 if (!testp4est_seq) { 133 ierr = PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr); 134 ierr = DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view");CHKERRQ(ierr); 135 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "dist_");CHKERRQ(ierr); 136 ierr = DMSetFromOptions(*dm);CHKERRQ(ierr); 137 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);CHKERRQ(ierr); 138 ierr = PetscLogStagePop();CHKERRQ(ierr); 139 ierr = DMViewFromOptions(*dm, NULL, "-distributed_dm_view");CHKERRQ(ierr); 140 } 141 ierr = PetscLogStagePush(user->stages[STAGE_REFINE]);CHKERRQ(ierr); 142 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "ref_");CHKERRQ(ierr); 143 ierr = DMSetFromOptions(*dm);CHKERRQ(ierr); 144 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);CHKERRQ(ierr); 145 ierr = PetscLogStagePop();CHKERRQ(ierr); 146 147 if (testp4est_par) { 
148 #if defined(PETSC_HAVE_P4EST) 149 DM dmConv = NULL; 150 151 ierr = DMViewFromOptions(*dm, NULL, "-dm_tobox_view");CHKERRQ(ierr); 152 ierr = DMPlexSetRefinementUniform(*dm, PETSC_TRUE);CHKERRQ(ierr); 153 ierr = DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);CHKERRQ(ierr); 154 ierr = DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);CHKERRQ(ierr); 155 if (dmConv) { 156 ierr = DMDestroy(dm);CHKERRQ(ierr); 157 *dm = dmConv; 158 } 159 ierr = DMViewFromOptions(*dm, NULL, "-dm_tobox_view");CHKERRQ(ierr); 160 ierr = DMPlexCheckSymmetry(*dm);CHKERRQ(ierr); 161 ierr = DMPlexCheckSkeleton(*dm, 0);CHKERRQ(ierr); 162 ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr); 163 ierr = DMPlexCheckGeometry(*dm);CHKERRQ(ierr); 164 ierr = DMPlexCheckPointSF(*dm);CHKERRQ(ierr); 165 ierr = DMPlexCheckInterfaceCones(*dm);CHKERRQ(ierr); 166 167 ierr = DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv);CHKERRQ(ierr); 168 if (dmConv) { 169 ierr = PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_1_");CHKERRQ(ierr); 170 ierr = DMSetFromOptions(dmConv);CHKERRQ(ierr); 171 ierr = DMDestroy(dm);CHKERRQ(ierr); 172 *dm = dmConv; 173 } 174 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_1_");CHKERRQ(ierr); 175 ierr = DMSetUp(*dm);CHKERRQ(ierr); 176 ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr); 177 ierr = DMConvert(*dm, DMPLEX, &dmConv);CHKERRQ(ierr); 178 if (dmConv) { 179 ierr = PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_2_");CHKERRQ(ierr); 180 ierr = DMSetFromOptions(dmConv);CHKERRQ(ierr); 181 ierr = DMDestroy(dm);CHKERRQ(ierr); 182 *dm = dmConv; 183 } 184 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_2_");CHKERRQ(ierr); 185 ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr); 186 ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);CHKERRQ(ierr); 187 #else 188 SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est"); 189 #endif 190 } 191 192 /* test redistribution of an already distributed mesh 
*/ 193 if (user->redistribute) { 194 DM distributedMesh; 195 PetscSF sf; 196 PetscInt nranks; 197 198 ierr = DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view");CHKERRQ(ierr); 199 ierr = DMPlexDistribute(*dm, 0, NULL, &distributedMesh);CHKERRQ(ierr); 200 if (distributedMesh) { 201 ierr = DMGetPointSF(distributedMesh, &sf);CHKERRQ(ierr); 202 ierr = PetscSFSetUp(sf);CHKERRQ(ierr); 203 ierr = DMGetNeighbors(distributedMesh, &nranks, NULL);CHKERRQ(ierr); 204 ierr = MPI_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm));CHKERRMPI(ierr); 205 ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %D\n", nranks);CHKERRQ(ierr); 206 ierr = DMDestroy(dm);CHKERRQ(ierr); 207 *dm = distributedMesh; 208 } 209 ierr = DMViewFromOptions(*dm, NULL, "-dm_post_redist_view");CHKERRQ(ierr); 210 } 211 212 if (user->overlap) { 213 DM overlapMesh = NULL; 214 215 /* Add the overlap to refined mesh */ 216 ierr = PetscLogStagePush(user->stages[STAGE_OVERLAP]);CHKERRQ(ierr); 217 ierr = DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view");CHKERRQ(ierr); 218 ierr = DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh);CHKERRQ(ierr); 219 if (overlapMesh) { 220 PetscInt overlap; 221 ierr = DMPlexGetOverlap(overlapMesh, &overlap);CHKERRQ(ierr); 222 ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %D\n", overlap);CHKERRQ(ierr); 223 ierr = DMDestroy(dm);CHKERRQ(ierr); 224 *dm = overlapMesh; 225 } 226 ierr = DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view");CHKERRQ(ierr); 227 ierr = PetscLogStagePop();CHKERRQ(ierr); 228 } 229 if (user->final_ref) { 230 DM refinedMesh = NULL; 231 232 ierr = DMPlexSetRefinementUniform(*dm, PETSC_TRUE);CHKERRQ(ierr); 233 ierr = DMRefine(*dm, comm, &refinedMesh);CHKERRQ(ierr); 234 if (refinedMesh) { 235 ierr = DMDestroy(dm);CHKERRQ(ierr); 236 *dm = refinedMesh; 237 } 238 } 239 240 ierr = PetscObjectSetName((PetscObject) *dm, "Simplicial 
Mesh");CHKERRQ(ierr); 241 ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr); 242 if (user->final_diagnostics) { 243 DMPlexInterpolatedFlag interpolated; 244 PetscInt dim, depth; 245 246 ierr = DMGetDimension(*dm, &dim);CHKERRQ(ierr); 247 ierr = DMPlexGetDepth(*dm, &depth);CHKERRQ(ierr); 248 ierr = DMPlexIsInterpolatedCollective(*dm, &interpolated);CHKERRQ(ierr); 249 250 ierr = DMPlexCheckSymmetry(*dm);CHKERRQ(ierr); 251 if (interpolated == DMPLEX_INTERPOLATED_FULL) { 252 ierr = DMPlexCheckFaces(*dm, 0);CHKERRQ(ierr); 253 } 254 ierr = DMPlexCheckSkeleton(*dm, 0);CHKERRQ(ierr); 255 ierr = DMPlexCheckGeometry(*dm);CHKERRQ(ierr); 256 } 257 ierr = PetscLogEventEnd(user->createMeshEvent,0,0,0,0);CHKERRQ(ierr); 258 PetscFunctionReturn(0); 259 } 260 261 int main(int argc, char **argv) 262 { 263 DM dm; 264 AppCtx user; 265 PetscErrorCode ierr; 266 267 ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr; 268 ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr); 269 ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr); 270 ierr = DMDestroy(&dm);CHKERRQ(ierr); 271 ierr = PetscFinalize(); 272 return ierr; 273 } 274 275 /*TEST 276 277 # CTetGen 0-1 278 test: 279 suffix: 0 280 requires: ctetgen 281 args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys 282 test: 283 suffix: 1 284 requires: ctetgen 285 args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_interpolate 0 -ctetgen_verbose 4 -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail -info :~sys 286 287 # 2D LaTex and ASCII output 2-9 288 test: 289 suffix: 2 290 requires: triangle 291 args: -dm_plex_interpolate 0 -dm_view ascii::ascii_latex 292 test: 293 suffix: 3 294 requires: triangle 295 args: -ref_dm_refine 1 -dm_view ascii::ascii_info_detail 296 test: 297 suffix: 4 298 requires: triangle 299 nsize: 2 300 args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute 
-petscpartitioner_type simple -dm_view ascii::ascii_info_detail 301 test: 302 suffix: 5 303 requires: triangle 304 nsize: 2 305 args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex 306 test: 307 suffix: 6 308 args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_view ascii::ascii_info_detail 309 test: 310 suffix: 7 311 args: -dm_coord_space 0 -dm_plex_simplex 0 -ref_dm_refine 1 -dm_view ascii::ascii_info_detail 312 test: 313 suffix: 8 314 nsize: 2 315 args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_latex 316 317 # 1D ASCII output 318 testset: 319 args: -dm_coord_space 0 -dm_plex_dim 1 -dm_view ascii::ascii_info_detail -dm_plex_check_all 320 test: 321 suffix: 1d_0 322 args: 323 test: 324 suffix: 1d_1 325 args: -ref_dm_refine 2 326 test: 327 suffix: 1d_2 328 args: -dm_plex_box_faces 5 -dm_plex_box_bd periodic 329 330 # Parallel refinement tests with overlap 331 test: 332 suffix: refine_overlap_1d 333 nsize: 2 334 args: -dm_plex_dim 1 -dim 1 -dm_plex_box_faces 4 -dm_plex_box_faces 4 -ref_dm_refine 1 -overlap {{0 1 2}separate output} -dist_dm_distribute -petscpartitioner_type simple -dm_view ascii::ascii_info 335 test: 336 suffix: refine_overlap_2d 337 requires: triangle 338 nsize: {{2 8}separate output} 339 args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info 340 341 # Parallel simple partitioner tests 342 test: 343 suffix: part_simple_0 344 requires: triangle 345 nsize: 2 346 args: -dm_coord_space 0 -dm_plex_interpolate 0 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail 347 test: 348 suffix: part_simple_1 349 requires: triangle 350 nsize: 8 351 args: -dm_coord_space 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type simple -dist_partition_view -dm_view ascii::ascii_info_detail 352 
353 # Parallel partitioner tests 354 test: 355 suffix: part_parmetis_0 356 requires: parmetis 357 nsize: 2 358 args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph 359 test: 360 suffix: part_ptscotch_0 361 requires: ptscotch 362 nsize: 2 363 args: -dm_plex_simplex 0 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph 364 test: 365 suffix: part_ptscotch_1 366 requires: ptscotch 367 nsize: 8 368 args: -dm_plex_simplex 0 -ref_dm_refine 1 -dist_dm_distribute -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1 369 370 # CGNS reader tests 10-11 (need to find smaller test meshes) 371 test: 372 suffix: cgns_0 373 requires: cgns 374 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -dm_view 375 376 # Gmsh mesh reader tests 377 testset: 378 args: -dm_coord_space 0 -dm_view 379 380 test: 381 suffix: gmsh_0 382 requires: !single 383 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 384 test: 385 suffix: gmsh_1 386 requires: !single 387 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh 388 test: 389 suffix: gmsh_2 390 requires: !single 391 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh 392 test: 393 suffix: gmsh_3 394 nsize: 3 395 requires: !single 396 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -dist_dm_distribute -petscpartitioner_type simple 397 test: 398 suffix: gmsh_4 399 nsize: 3 400 requires: !single 401 args: -dm_plex_filename 
${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dist_dm_distribute -petscpartitioner_type simple 402 test: 403 suffix: gmsh_5 404 requires: !single 405 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh 406 # TODO: it seems the mesh is not a valid gmsh (inverted cell) 407 test: 408 suffix: gmsh_6 409 requires: !single 410 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -final_diagnostics 0 411 test: 412 suffix: gmsh_7 413 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all 414 test: 415 suffix: gmsh_8 416 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -dm_plex_check_all 417 testset: 418 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -dm_plex_check_all 419 test: 420 suffix: gmsh_9 421 test: 422 suffix: gmsh_9_periodic_0 423 args: -dm_plex_gmsh_periodic 0 424 testset: 425 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all 426 test: 427 suffix: gmsh_10 428 test: 429 suffix: gmsh_10_periodic_0 430 args: -dm_plex_gmsh_periodic 0 431 testset: 432 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -dm_plex_check_all -ref_dm_refine 1 433 test: 434 suffix: gmsh_11 435 test: 436 suffix: gmsh_11_periodic_0 437 args: -dm_plex_gmsh_periodic 0 438 # TODO: it seems the mesh is not a valid gmsh (inverted cell) 439 test: 440 suffix: gmsh_12 441 nsize: 4 442 requires: !single mpiio 443 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -dist_dm_distribute -petscpartitioner_type 
simple -dm_view -final_diagnostics 0 444 test: 445 suffix: gmsh_13_hybs2t 446 nsize: 4 447 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dist_dm_distribute -petscpartitioner_type simple -dm_view -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_plex_check_all 448 test: 449 suffix: gmsh_14_ext 450 requires: !single 451 args: -dm_coord_space 0 -dm_extrude_layers 2 -dm_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all 452 test: 453 suffix: gmsh_14_ext_s2t 454 requires: !single 455 args: -dm_coord_space 0 -dm_extrude_layers 2 -dm_extrude_thickness 1.5 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox 456 test: 457 suffix: gmsh_15_hyb3d 458 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all 459 test: 460 suffix: gmsh_15_hyb3d_vtk 461 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -dm_plex_gmsh_hybrid -dm_plex_check_all 462 test: 463 suffix: gmsh_15_hyb3d_s2t 464 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox 465 test: 466 suffix: gmsh_16_spheresurface 467 nsize : 4 468 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple 469 test: 470 suffix: gmsh_16_spheresurface_s2t 471 nsize : 4 472 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -dist_dm_distribute 
-petscpartitioner_type simple 473 test: 474 suffix: gmsh_16_spheresurface_extruded 475 nsize : 4 476 args: -dm_coord_space 0 -dm_extrude_layers 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple 477 test: 478 suffix: gmsh_16_spheresurface_extruded_s2t 479 nsize : 4 480 args: -dm_coord_space 0 -dm_extrude_layers 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple 481 test: 482 suffix: gmsh_17_hyb3d_interp_ascii 483 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -dm_plex_check_all 484 test: 485 suffix: exodus_17_hyb3d_interp_ascii 486 requires: exodusii 487 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -dm_plex_check_all 488 489 # Legacy Gmsh v22/v40 ascii/binary reader tests 490 testset: 491 output_file: output/ex1_gmsh_3d_legacy.out 492 args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all 493 test: 494 suffix: gmsh_3d_ascii_v22 495 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2 496 test: 497 suffix: gmsh_3d_ascii_v40 498 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4 499 test: 500 suffix: gmsh_3d_binary_v22 501 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2 502 test: 503 suffix: gmsh_3d_binary_v40 504 requires: long64 505 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4 506 507 # Gmsh v41 ascii/binary reader tests 508 testset: # 32bit mesh, sequential 509 args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all 510 output_file: output/ex1_gmsh_3d_32.out 511 test: 
512 suffix: gmsh_3d_ascii_v41_32 513 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh 514 test: 515 suffix: gmsh_3d_binary_v41_32 516 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh 517 test: 518 suffix: gmsh_3d_binary_v41_32_mpiio 519 requires: define(PETSC_HAVE_MPIIO) 520 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio 521 testset: # 32bit mesh, parallel 522 args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all 523 nsize: 2 524 output_file: output/ex1_gmsh_3d_32_np2.out 525 test: 526 suffix: gmsh_3d_ascii_v41_32_np2 527 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh 528 test: 529 suffix: gmsh_3d_binary_v41_32_np2 530 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh 531 test: 532 suffix: gmsh_3d_binary_v41_32_np2_mpiio 533 requires: define(PETSC_HAVE_MPIIO) 534 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio 535 testset: # 64bit mesh, sequential 536 args: -dm_coord_space 0 -dm_view ::ascii_info_detail -dm_plex_check_all 537 output_file: output/ex1_gmsh_3d_64.out 538 test: 539 suffix: gmsh_3d_ascii_v41_64 540 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh 541 test: 542 suffix: gmsh_3d_binary_v41_64 543 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh 544 test: 545 suffix: gmsh_3d_binary_v41_64_mpiio 546 requires: define(PETSC_HAVE_MPIIO) 547 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio 548 testset: # 64bit mesh, parallel 549 args: -dm_coord_space 0 -dist_dm_distribute -petscpartitioner_type simple -dm_view ::ascii_info_detail -dm_plex_check_all 550 nsize: 2 551 output_file: 
output/ex1_gmsh_3d_64_np2.out 552 test: 553 suffix: gmsh_3d_ascii_v41_64_np2 554 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh 555 test: 556 suffix: gmsh_3d_binary_v41_64_np2 557 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh 558 test: 559 suffix: gmsh_3d_binary_v41_64_np2_mpiio 560 requires: define(PETSC_HAVE_MPIIO) 561 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio 562 563 # Fluent mesh reader tests 564 # TODO: Geometry checks fail 565 test: 566 suffix: fluent_0 567 requires: !complex 568 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dm_view -final_diagnostics 0 569 test: 570 suffix: fluent_1 571 nsize: 3 572 requires: !complex 573 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -dist_dm_distribute -petscpartitioner_type simple -dm_view -final_diagnostics 0 574 test: 575 suffix: fluent_2 576 requires: !complex 577 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -dm_view -final_diagnostics 0 578 test: 579 suffix: fluent_3 580 requires: !complex 581 TODO: Fails on non-linux: fseek(), fileno() ? 
https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382 582 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -dm_view -final_diagnostics 0 583 584 # Med mesh reader tests, including parallel file reads 585 test: 586 suffix: med_0 587 requires: med 588 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dm_view 589 test: 590 suffix: med_1 591 requires: med 592 nsize: 3 593 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -dist_dm_distribute -petscpartitioner_type simple -dm_view 594 test: 595 suffix: med_2 596 requires: med 597 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dm_view 598 test: 599 suffix: med_3 600 requires: med 601 TODO: MED 602 nsize: 3 603 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -dist_dm_distribute -petscpartitioner_type simple -dm_view 604 605 # Test shape quality 606 test: 607 suffix: test_shape 608 requires: ctetgen 609 args: -dm_plex_dim 3 -dim 3 -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape 610 611 # Test simplex to tensor conversion 612 test: 613 suffix: s2t2 614 requires: triangle 615 args: -dm_coord_space 0 -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail 616 617 test: 618 suffix: s2t3 619 requires: ctetgen 620 args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_refine_volume_limit_pre 0.0625 -dm_view ascii::ascii_info_detail 621 622 # Test cylinder 623 testset: 624 args: -dm_plex_shape cylinder -dm_plex_check_all -dm_view 625 test: 626 suffix: cylinder 627 args: -ref_dm_refine 1 628 test: 629 suffix: cylinder_per 630 args: -dm_plex_cylinder_bd periodic -ref_dm_refine 1 -ref_dm_refine_remap 0 631 test: 632 suffix: cylinder_wedge 633 args: -dm_coord_space 0 -dm_plex_interpolate 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: 634 
test: 635 suffix: cylinder_wedge_int 636 output_file: output/ex1_cylinder_wedge.out 637 args: -dm_coord_space 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: 638 639 test: 640 suffix: box_2d 641 args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view 642 643 test: 644 suffix: box_2d_per 645 args: -dm_plex_simplex 0 -ref_dm_refine 2 -dm_plex_check_all -dm_view 646 647 test: 648 suffix: box_2d_per_unint 649 args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_interpolate 0 -dm_plex_box_faces 3,3 -dm_plex_box_faces 3,3 -dm_plex_check_all -dm_view ::ascii_info_detail 650 651 test: 652 suffix: box_3d 653 args: -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -ref_dm_refine 3 -dm_plex_check_all -dm_view 654 655 test: 656 requires: triangle 657 suffix: box_wedge 658 args: -dm_coord_space 0 -dm_plex_dim 3 -dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_view vtk: -dm_plex_check_all 659 660 testset: 661 requires: triangle 662 args: -dm_coord_space 0 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_cell tensor_triangular_prism -dm_plex_box_faces 2,3,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox 663 test: 664 suffix: box_wedge_s2t 665 test: 666 nsize: 3 667 args: -dist_dm_distribute -petscpartitioner_type simple 668 suffix: box_wedge_s2t_parallel 669 670 # Test GLVis output 671 testset: 672 args: -dm_coord_space 0 -dm_plex_interpolate 0 673 test: 674 suffix: glvis_2d_tet 675 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis: 676 test: 677 suffix: glvis_2d_tet_per 678 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0 679 test: 680 suffix: glvis_3d_tet 681 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis: 682 testset: 683 args: -dm_coord_space 0 684 test: 685 suffix: 
glvis_2d_tet_per_mfem 686 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: 687 test: 688 suffix: glvis_2d_quad 689 args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_view glvis: 690 test: 691 suffix: glvis_2d_quad_per 692 args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 693 test: 694 suffix: glvis_2d_quad_per_mfem 695 args: -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_plex_box_bd periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem 696 test: 697 suffix: glvis_3d_tet_per 698 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 699 test: 700 suffix: glvis_3d_tet_per_mfem 701 TODO: broken 702 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: 703 test: 704 suffix: glvis_3d_hex 705 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_view glvis: 706 test: 707 suffix: glvis_3d_hex_per 708 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0 709 test: 710 suffix: glvis_3d_hex_per_mfem 711 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 3,3,3 -dm_plex_box_bd periodic,periodic,periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem 712 test: 713 suffix: glvis_2d_hyb 714 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 715 test: 716 suffix: glvis_3d_hyb 717 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view 
glvis: -viewer_glvis_dm_plex_enable_boundary 718 test: 719 suffix: glvis_3d_hyb_s2t 720 args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -ref_dm_refine 1 -ref_dm_plex_cell_refiner tobox -dm_plex_check_all 721 722 # Test P4EST 723 testset: 724 requires: p4est 725 args: -dm_coord_space 0 -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1 726 test: 727 suffix: p4est_periodic 728 args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash 729 test: 730 suffix: p4est_periodic_3d 731 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash 732 test: 733 suffix: p4est_gmsh_periodic 734 args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh 735 test: 736 suffix: p4est_gmsh_surface 737 args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 738 test: 739 suffix: p4est_gmsh_surface_parallel 740 nsize: 2 741 args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance 742 test: 743 suffix: p4est_hyb_2d 744 args: 
-conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh 745 test: 746 suffix: p4est_hyb_3d 747 args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh 748 test: 749 requires: ctetgen 750 suffix: p4est_s2t_bugfaces_3d 751 args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dm_plex_dim 3 -dm_plex_box_faces 1,1 752 test: 753 suffix: p4est_bug_overlapsf 754 nsize: 3 755 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple 756 test: 757 suffix: p4est_redistribute 758 nsize: 3 759 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_faces 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_view ::load_balance 760 test: 761 suffix: p4est_gmsh_s2t_3d 762 args: -conv_seq_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 763 test: 764 suffix: p4est_gmsh_s2t_3d_hash 765 args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 766 test: 767 requires: long_runtime 768 suffix: p4est_gmsh_periodic_3d 769 args: -dm_coord_space 0 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -dm_plex_filename 
${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh 770 771 testset: 772 requires: p4est 773 nsize: 6 774 args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0 -dist_dm_distribute 775 test: 776 TODO: interface cones do not conform 777 suffix: p4est_par_periodic 778 args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash 779 test: 780 TODO: interface cones do not conform 781 suffix: p4est_par_periodic_3d 782 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,periodic -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash 783 test: 784 TODO: interface cones do not conform 785 suffix: p4est_par_gmsh_periodic 786 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh 787 test: 788 suffix: p4est_par_gmsh_surface 789 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 790 test: 791 suffix: p4est_par_gmsh_s2t_3d 792 args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 793 test: 794 TODO: interface cones do not conform 795 suffix: p4est_par_gmsh_s2t_3d_hash 796 args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 797 
test: 798 requires: long_runtime 799 suffix: p4est_par_gmsh_periodic_3d 800 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh 801 802 testset: 803 requires: p4est 804 nsize: 6 805 args: -dm_coord_space 0 -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -dist_dm_distribute -petscpartitioner_type simple 806 test: 807 suffix: p4est_par_ovl_periodic 808 args: -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic -dm_plex_box_faces 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash 809 #TODO Mesh cell 201 is inverted, vol = 0. (FVM Volume. Is it correct? -> Diagnostics disabled) 810 test: 811 suffix: p4est_par_ovl_periodic_3d 812 args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_plex_box_bd periodic,periodic,none -dm_plex_box_faces 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0 813 test: 814 suffix: p4est_par_ovl_gmsh_periodic 815 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh 816 test: 817 suffix: p4est_par_ovl_gmsh_surface 818 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 819 test: 820 suffix: p4est_par_ovl_gmsh_s2t_3d 821 args: -conv_par_1_dm_forest_initial_refinement 1 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 822 test: 823 suffix: 
p4est_par_ovl_gmsh_s2t_3d_hash 824 args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh 825 test: 826 requires: long_runtime 827 suffix: p4est_par_ovl_gmsh_periodic_3d 828 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh 829 test: 830 suffix: p4est_par_ovl_hyb_2d 831 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh 832 test: 833 suffix: p4est_par_ovl_hyb_3d 834 args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh 835 836 test: 837 TODO: broken 838 requires: p4est 839 nsize: 2 840 suffix: p4est_bug_labels_noovl 841 args: -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -dist_dm_distribute -petscpartitioner_type simple -dm_forest_print_label_error 842 843 test: 844 requires: p4est 845 nsize: 2 846 suffix: p4est_bug_distribute_overlap 847 args: -dm_coord_space 0 -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dm_plex_simplex 0 -dm_plex_box_faces 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance 848 args: 
-dm_post_overlap_view 849 850 test: 851 suffix: ref_alfeld2d_0 852 requires: triangle 853 args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_cell_refiner alfeld2d -final_diagnostics 854 test: 855 suffix: ref_alfeld3d_0 856 requires: ctetgen 857 args: -dm_plex_dim 3 -dm_plex_box_faces 5,1,1 -dm_view -dm_plex_check_all -ref_dm_refine 1 -ref_dm_plex_cell_refiner alfeld3d -final_diagnostics 858 859 # Boundary layer refiners 860 test: 861 suffix: ref_bl_1 862 args: -dm_plex_dim 1 -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_cell_refiner boundarylayer -dm_extrude_layers 2 -final_diagnostics -ref_dm_plex_refine_boundarylayer_splits 3 -dm_extrude_column_first {{0 1}} 863 test: 864 suffix: ref_bl_2_tri 865 requires: triangle 866 args: -dm_plex_box_faces 5,3 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_cell_refiner boundarylayer -dm_extrude_layers 3 -final_diagnostics -ref_dm_plex_refine_boundarylayer_splits 4 -dm_extrude_column_first {{0 1}} 867 test: 868 suffix: ref_bl_3_quad 869 args: -dm_plex_simplex 0 -dm_plex_box_faces 5,1 -dm_view -dm_plex_check_all 0 -ref_dm_refine 1 -ref_dm_plex_cell_refiner boundarylayer -dm_extrude_layers 3 -final_diagnostics -ref_dm_plex_refine_boundarylayer_splits 4 -dm_extrude_column_first {{0 1}} 870 test: 871 suffix: ref_bl_spheresurface_extruded 872 nsize: 4 873 args: -dm_extrude_layers 3 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -dist_dm_distribute -petscpartitioner_type simple -dm_extrude_column_first {{0 1}separate output} -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_cell_refiner boundarylayer -ref_dm_plex_refine_boundarylayer_splits 2 874 test: 875 suffix: ref_bl_3d_hyb 876 nsize: 4 877 args: -dm_coord_space 0 -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -dm_plex_check_all -dm_view
-dist_dm_distribute -petscpartitioner_type simple -final_diagnostics -ref_dm_refine 1 -ref_dm_plex_cell_refiner boundarylayer -ref_dm_plex_refine_boundarylayer_splits 4 -ref_dm_plex_refine_boundarylayer_progression 3.1 878 879 testset: 880 args: -dm_plex_shape sphere -dm_plex_check_all -dm_view 881 test: 882 suffix: sphere_0 883 args: 884 test: 885 suffix: sphere_1 886 args: -ref_dm_refine 2 887 test: 888 suffix: sphere_2 889 args: -dm_plex_simplex 0 890 test: 891 suffix: sphere_3 892 args: -dm_plex_simplex 0 -ref_dm_refine 2 893 894 test: 895 suffix: ball_0 896 requires: ctetgen 897 args: -dm_plex_dim 3 -dm_plex_shape ball -dm_plex_check_all -dm_view 898 899 test: 900 suffix: ball_1 901 requires: ctetgen 902 args: -dm_plex_dim 3 -dm_plex_shape ball -bd_dm_refine 2 -dm_plex_check_all -dm_view 903 904 TEST*/ 905