1 static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n"; 2 3 #include <petsc/private/dmpleximpl.h> 4 /* List of test meshes 5 6 Network 7 ------- 8 Test 0 (2 ranks): 9 10 network=0: 11 --------- 12 cell 0 cell 1 cell 2 nCells-1 (edge) 13 0 ------ 1 ------ 2 ------ 3 -- -- v -- -- nCells (vertex) 14 15 vertex distribution: 16 rank 0: 0 1 17 rank 1: 2 3 ... nCells 18 cell(edge) distribution: 19 rank 0: 0 1 20 rank 1: 2 ... nCells-1 21 22 network=1: 23 --------- 24 v2 25 ^ 26 | 27 cell 2 28 | 29 v0 --cell 0--> v3--cell 1--> v1 30 31 vertex distribution: 32 rank 0: 0 1 3 33 rank 1: 2 34 cell(edge) distribution: 35 rank 0: 0 1 36 rank 1: 2 37 38 example: 39 mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50 40 41 Triangle 42 -------- 43 Test 0 (2 ranks): 44 Two triangles sharing a face 45 46 2 47 / | \ 48 / | \ 49 / | \ 50 0 0 | 1 3 51 \ | / 52 \ | / 53 \ | / 54 1 55 56 vertex distribution: 57 rank 0: 0 1 58 rank 1: 2 3 59 cell distribution: 60 rank 0: 0 61 rank 1: 1 62 63 Test 1 (3 ranks): 64 Four triangles partitioned across 3 ranks 65 66 0 _______ 3 67 | \ / | 68 | \ 1 / | 69 | \ / | 70 | 0 2 2 | 71 | / \ | 72 | / 3 \ | 73 | / \ | 74 1 ------- 4 75 76 vertex distribution: 77 rank 0: 0 1 78 rank 1: 2 3 79 rank 2: 4 80 cell distribution: 81 rank 0: 0 82 rank 1: 1 83 rank 2: 2 3 84 85 Test 2 (3 ranks): 86 Four triangles partitioned across 3 ranks 87 88 1 _______ 3 89 | \ / | 90 | \ 1 / | 91 | \ / | 92 | 0 0 2 | 93 | / \ | 94 | / 3 \ | 95 | / \ | 96 2 ------- 4 97 98 vertex distribution: 99 rank 0: 0 1 100 rank 1: 2 3 101 rank 2: 4 102 cell distribution: 103 rank 0: 0 104 rank 1: 1 105 rank 2: 2 3 106 107 Tetrahedron 108 ----------- 109 Test 0: 110 Two tets sharing a face 111 112 cell 3 _______ cell 113 0 / | \ \ 1 114 / | \ \ 115 / | \ \ 116 0----|----4-----2 117 \ | / / 118 \ | / / 119 \ | / / 120 1------- 121 y 122 | x 123 |/ 124 *----z 125 126 vertex 
distribution: 127 rank 0: 0 1 128 rank 1: 2 3 4 129 cell distribution: 130 rank 0: 0 131 rank 1: 1 132 133 Quadrilateral 134 ------------- 135 Test 0 (2 ranks): 136 Two quads sharing a face 137 138 3-------2-------5 139 | | | 140 | 0 | 1 | 141 | | | 142 0-------1-------4 143 144 vertex distribution: 145 rank 0: 0 1 2 146 rank 1: 3 4 5 147 cell distribution: 148 rank 0: 0 149 rank 1: 1 150 151 TODO Test 1: 152 A quad and a triangle sharing a face 153 154 5-------4 155 | | \ 156 | 0 | \ 157 | | 1 \ 158 2-------3----6 159 160 Hexahedron 161 ---------- 162 Test 0 (2 ranks): 163 Two hexes sharing a face 164 165 cell 7-------------6-------------11 cell 166 0 /| /| /| 1 167 / | F1 / | F7 / | 168 / | / | / | 169 4-------------5-------------10 | 170 | | F4 | | F10 | | 171 | | | | | | 172 |F5 | |F3 | |F9 | 173 | | F2 | | F8 | | 174 | 3---------|---2---------|---9 175 | / | / | / 176 | / F0 | / F6 | / 177 |/ |/ |/ 178 0-------------1-------------8 179 180 vertex distribution: 181 rank 0: 0 1 2 3 4 5 182 rank 1: 6 7 8 9 10 11 183 cell distribution: 184 rank 0: 0 185 rank 1: 1 186 187 */ 188 189 typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType; 190 191 typedef struct { 192 PetscInt debug; /* The debugging level */ 193 PetscInt testNum; /* Indicates the mesh to create */ 194 PetscInt dim; /* The topological mesh dimension */ 195 PetscBool cellSimplex; /* Use simplices or hexes */ 196 PetscBool distribute; /* Distribute the mesh */ 197 InterpType interpolate; /* Interpolate the mesh before or after DMPlexDistribute() */ 198 PetscBool useGenerator; /* Construct mesh with a mesh generator */ 199 PetscBool testOrientIF; /* Test for different original interface orientations */ 200 PetscBool testHeavy; /* Run the heavy PointSF test */ 201 PetscBool customView; /* Show results of DMPlexIsInterpolated() etc. 
                                              */
  PetscInt      ornt[2];                           /* Orientation of interface on rank 0 and rank 1 */
  PetscInt      faces[3];                          /* Number of faces per dimension for generator */
  PetscScalar   coords[128];                       /* Flattened coordinates read from -view_vertices_from_coords */
  PetscReal     coordsTol;                         /* Tolerance for matching vertices in -view_vertices_from_coords */
  PetscInt      ncoords;                           /* Number of scalars actually stored in coords[] */
  PetscInt      pointsToExpand[128];               /* DAG points read from -test_expand_points */
  PetscInt      nPointsToExpand;                   /* Number of entries actually stored in pointsToExpand[] */
  PetscBool     testExpandPointsEmpty;             /* If true, rank 0 passes an empty array to the expand test */
  char          filename[PETSC_MAX_PATH_LEN];      /* Import mesh from file */
} AppCtx;

/* Process-independent snapshot of a mesh boundary: vertex coordinates plus
   one PetscSection per depth level (presumably describing expanded cones;
   see DMPlexGetExpandedBoundary_Private() — confirm against its definition). */
struct _n_PortableBoundary {
  Vec           coordinates;
  PetscInt      depth;
  PetscSection *sections;
};
typedef struct _n_PortableBoundary *PortableBoundary;

#if defined(PETSC_USE_LOG)
static PetscLogStage stage[3];
#endif

/* Helpers defined later in this file */
static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);

/* Destroy a PortableBoundary and all objects it owns (coordinates Vec and the
   per-depth sections array). Safe to call when *bnd is NULL; sets nothing on output. */
static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
{
  PetscInt d;

  PetscFunctionBegin;
  if (!*bnd) PetscFunctionReturn(0);
  PetscCall(VecDestroy(&(*bnd)->coordinates));
  for (d=0; d < (*bnd)->depth; d++) {
    PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
  }
  PetscCall(PetscFree((*bnd)->sections));
  PetscCall(PetscFree(*bnd));
  PetscFunctionReturn(0);
}

/* Fill options with defaults, then override them from the command line. */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
  PetscInt    interp=NONE, dim;
  PetscBool   flg1, flg2;

  PetscFunctionBegin;
  /* Defaults; each may be overridden by the matching option below. */
  options->debug        = 0;
  options->testNum      = 0;
  options->dim          = 2;
  options->cellSimplex  = PETSC_TRUE;
  options->distribute   = PETSC_FALSE;
  options->interpolate  = NONE;
  options->useGenerator = PETSC_FALSE;
  options->testOrientIF = PETSC_FALSE;
  options->testHeavy    = PETSC_TRUE;
  options->customView
= PETSC_FALSE; 261 options->testExpandPointsEmpty = PETSC_FALSE; 262 options->ornt[0] = 0; 263 options->ornt[1] = 0; 264 options->faces[0] = 2; 265 options->faces[1] = 2; 266 options->faces[2] = 2; 267 options->filename[0] = '\0'; 268 options->coordsTol = PETSC_DEFAULT; 269 270 PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX"); 271 PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0)); 272 PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0)); 273 PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL)); 274 PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL)); 275 PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL)); 276 options->interpolate = (InterpType) interp; 277 PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE,comm, PETSC_ERR_SUP, "-interpolate after_distribute needs -distribute 1"); 278 PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL)); 279 options->ncoords = 128; 280 PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL)); 281 PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL)); 282 options->nPointsToExpand = 128; 283 PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using 
DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL)); 284 if (options->nPointsToExpand) { 285 PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL)); 286 } 287 PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL)); 288 PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL)); 289 PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3)); 290 dim = 3; 291 PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2)); 292 if (flg2) { 293 PetscCheck(!flg1 || dim == options->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim); 294 options->dim = dim; 295 } 296 PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL)); 297 PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0)); 298 PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0)); 299 PetscCheck(flg2 == options->testOrientIF,comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set"); 300 if (options->testOrientIF) { 301 PetscInt i; 302 for (i=0; i<2; i++) { 303 if (options->ornt[i] 
>= 10) options->ornt[i] = -(options->ornt[i]-10); /* 11 12 13 become -1 -2 -3 */ 304 } 305 options->filename[0] = 0; 306 options->useGenerator = PETSC_FALSE; 307 options->dim = 3; 308 options->cellSimplex = PETSC_TRUE; 309 options->interpolate = CREATE; 310 options->distribute = PETSC_FALSE; 311 } 312 PetscOptionsEnd(); 313 PetscFunctionReturn(0); 314 } 315 316 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) 317 { 318 PetscInt testNum = user->testNum; 319 PetscMPIInt rank,size; 320 PetscInt numCorners=2,i; 321 PetscInt numCells,numVertices,network; 322 PetscInt *cells; 323 PetscReal *coords; 324 325 PetscFunctionBegin; 326 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 327 PetscCallMPI(MPI_Comm_size(comm, &size)); 328 PetscCheck(size <= 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes",testNum); 329 330 numCells = 3; 331 PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL)); 332 PetscCheck(numCells >= 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must >=3",numCells); 333 334 if (size == 1) { 335 numVertices = numCells + 1; 336 PetscCall(PetscMalloc2(2*numCells,&cells,2*numVertices,&coords)); 337 for (i=0; i<numCells; i++) { 338 cells[2*i] = i; cells[2*i+1] = i + 1; 339 coords[2*i] = i; coords[2*i+1] = i + 1; 340 } 341 342 PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm)); 343 PetscCall(PetscFree2(cells,coords)); 344 PetscFunctionReturn(0); 345 } 346 347 network = 0; 348 PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL)); 349 if (network == 0) { 350 switch (rank) { 351 case 0: 352 { 353 numCells = 2; 354 numVertices = numCells; 355 PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords)); 356 cells[0] = 0; cells[1] = 1; 357 cells[2] = 1; cells[3] = 2; 358 coords[0] = 0.; coords[1] = 1.; 359 coords[2] = 1.; coords[3] = 2.; 360 } 361 break; 
362 case 1: 363 { 364 numCells -= 2; 365 numVertices = numCells + 1; 366 PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords)); 367 for (i=0; i<numCells; i++) { 368 cells[2*i] = 2+i; cells[2*i+1] = 2 + i + 1; 369 coords[2*i] = 2+i; coords[2*i+1] = 2 + i + 1; 370 } 371 } 372 break; 373 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 374 } 375 } else { /* network_case = 1 */ 376 /* ----------------------- */ 377 switch (rank) { 378 case 0: 379 { 380 numCells = 2; 381 numVertices = 3; 382 PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords)); 383 cells[0] = 0; cells[1] = 3; 384 cells[2] = 3; cells[3] = 1; 385 } 386 break; 387 case 1: 388 { 389 numCells = 1; 390 numVertices = 1; 391 PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords)); 392 cells[0] = 3; cells[1] = 2; 393 } 394 break; 395 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 396 } 397 } 398 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm)); 399 PetscCall(PetscFree2(cells,coords)); 400 PetscFunctionReturn(0); 401 } 402 403 static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) 404 { 405 PetscInt testNum = user->testNum, p; 406 PetscMPIInt rank, size; 407 408 PetscFunctionBegin; 409 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 410 PetscCallMPI(MPI_Comm_size(comm, &size)); 411 switch (testNum) { 412 case 0: 413 PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum); 414 switch (rank) { 415 case 0: 416 { 417 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 418 const PetscInt cells[3] = {0, 1, 2}; 419 PetscReal coords[4] = {-0.5, 0.5, 0.0, 0.0}; 420 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 421 422 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, 
numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 423 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 424 } 425 break; 426 case 1: 427 { 428 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 429 const PetscInt cells[3] = {1, 3, 2}; 430 PetscReal coords[4] = {0.0, 1.0, 0.5, 0.5}; 431 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 432 433 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 434 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 435 } 436 break; 437 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 438 } 439 break; 440 case 1: 441 PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum); 442 switch (rank) { 443 case 0: 444 { 445 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 446 const PetscInt cells[3] = {0, 1, 2}; 447 PetscReal coords[4] = {0.0, 1.0, 0.0, 0.0}; 448 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 449 450 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 451 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 452 } 453 break; 454 case 1: 455 { 456 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 457 const PetscInt cells[3] = {0, 2, 3}; 458 PetscReal coords[4] = {0.5, 0.5, 1.0, 1.0}; 459 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 460 461 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 462 for (p = 0; p < 3; ++p) 
PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 463 } 464 break; 465 case 2: 466 { 467 const PetscInt numCells = 2, numVertices = 1, numCorners = 3; 468 const PetscInt cells[6] = {2, 4, 3, 2, 1, 4}; 469 PetscReal coords[2] = {1.0, 0.0}; 470 PetscInt markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1}; 471 472 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 473 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 474 } 475 break; 476 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 477 } 478 break; 479 case 2: 480 PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum); 481 switch (rank) { 482 case 0: 483 { 484 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 485 const PetscInt cells[3] = {1, 2, 0}; 486 PetscReal coords[4] = {0.5, 0.5, 0.0, 1.0}; 487 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 488 489 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 490 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 491 } 492 break; 493 case 1: 494 { 495 const PetscInt numCells = 1, numVertices = 2, numCorners = 3; 496 const PetscInt cells[3] = {1, 0, 3}; 497 PetscReal coords[4] = {0.0, 0.0, 1.0, 1.0}; 498 PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1}; 499 500 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 501 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 502 } 503 break; 504 case 2: 505 { 506 const 
PetscInt numCells = 2, numVertices = 1, numCorners = 3; 507 const PetscInt cells[6] = {0, 4, 3, 0, 2, 4}; 508 PetscReal coords[2] = {1.0, 0.0}; 509 PetscInt markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1}; 510 511 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 512 for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 513 } 514 break; 515 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 516 } 517 break; 518 default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum); 519 } 520 PetscFunctionReturn(0); 521 } 522 523 static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) 524 { 525 PetscInt testNum = user->testNum, p; 526 PetscMPIInt rank, size; 527 528 PetscFunctionBegin; 529 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 530 PetscCallMPI(MPI_Comm_size(comm, &size)); 531 switch (testNum) { 532 case 0: 533 PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum); 534 switch (rank) { 535 case 0: 536 { 537 const PetscInt numCells = 1, numVertices = 2, numCorners = 4; 538 const PetscInt cells[4] = {0, 2, 1, 3}; 539 PetscReal coords[6] = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0}; 540 PetscInt markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1}; 541 542 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 543 for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 544 } 545 break; 546 case 1: 547 { 548 const PetscInt numCells = 1, numVertices = 3, numCorners = 4; 549 const PetscInt cells[4] = {1, 2, 4, 3}; 550 PetscReal coords[9] = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5}; 551 
PetscInt markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1}; 552 553 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 554 for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 555 } 556 break; 557 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 558 } 559 break; 560 default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum); 561 } 562 if (user->testOrientIF) { 563 PetscInt ifp[] = {8, 6}; 564 565 PetscCall(PetscObjectSetName((PetscObject) *dm, "Mesh before orientation")); 566 PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view")); 567 /* rotate interface face ifp[rank] by given orientation ornt[rank] */ 568 PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank])); 569 PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view")); 570 PetscCall(DMPlexCheckFaces(*dm, 0)); 571 PetscCall(DMPlexOrientInterface_Internal(*dm)); 572 PetscCall(PetscPrintf(comm, "Orientation test PASSED\n")); 573 } 574 PetscFunctionReturn(0); 575 } 576 577 static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) 578 { 579 PetscInt testNum = user->testNum, p; 580 PetscMPIInt rank, size; 581 582 PetscFunctionBegin; 583 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 584 PetscCallMPI(MPI_Comm_size(comm, &size)); 585 switch (testNum) { 586 case 0: 587 PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum); 588 switch (rank) { 589 case 0: 590 { 591 const PetscInt numCells = 1, numVertices = 3, numCorners = 4; 592 const PetscInt cells[4] = {0, 1, 2, 3}; 593 PetscReal coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0}; 594 PetscInt markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1}; 595 596 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, 
user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 597 for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 598 } 599 break; 600 case 1: 601 { 602 const PetscInt numCells = 1, numVertices = 3, numCorners = 4; 603 const PetscInt cells[4] = {1, 4, 5, 2}; 604 PetscReal coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0}; 605 PetscInt markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1}; 606 607 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm)); 608 for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1])); 609 } 610 break; 611 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank); 612 } 613 break; 614 default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum); 615 } 616 PetscFunctionReturn(0); 617 } 618 619 static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) 620 { 621 PetscInt testNum = user->testNum, p; 622 PetscMPIInt rank, size; 623 624 PetscFunctionBegin; 625 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 626 PetscCallMPI(MPI_Comm_size(comm, &size)); 627 switch (testNum) { 628 case 0: 629 PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum); 630 switch (rank) { 631 case 0: 632 { 633 const PetscInt numCells = 1, numVertices = 6, numCorners = 8; 634 const PetscInt cells[8] = {0, 3, 2, 1, 4, 5, 6, 7}; 635 PetscReal coords[6*3] = {-0.5,0.0,0.0, 0.0,0.0,0.0, 0.0,1.0,0.0, -0.5,1.0,0.0, -0.5,0.0,1.0, 0.0,0.0,1.0}; 636 PetscInt markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1}; 637 638 PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, 
NULL, NULL, dm));
      /* NOTE(review): only the first 4 of the 8 pairs in markerPoints are applied,
         mirroring the rank 0 branch — confirm this is intentional. */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    case 1:
    {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8] = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6*3]       = {0.0,1.0,1.0, -0.5,1.0,1.0, 0.5,0.0,0.0, 0.5,1.0,0.0, 0.5,0.0,1.0, 0.5,1.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): only the first 4 of the 8 pairs in markerPoints are applied — confirm intentional. */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}

/* Print whether dm is distributed and its collective interpolation state to viewer v. */
static PetscErrorCode CustomView(DM dm, PetscViewer v)
{
  DMPlexInterpolatedFlag interpolated;
  PetscBool              distributed;

  PetscFunctionBegin;
  PetscCall(DMPlexIsDistributed(dm, &distributed));
  PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
  PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
  PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
  PetscFunctionReturn(0);
}

/* Load the mesh from user->filename into *dm; when the heavy PointSF test is on
   and the load came out distributed, also produce a serial copy in *serialDM
   (otherwise *serialDM is left NULL). */
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char *filename     = user->filename;
  PetscBool   testHeavy    = user->testHeavy;
  PetscBool   interpCreate = user->interpolate == CREATE ?
PETSC_TRUE : PETSC_FALSE; 679 PetscBool distributed = PETSC_FALSE; 680 681 PetscFunctionBegin; 682 *serialDM = NULL; 683 if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE)); 684 PetscCall(PetscLogStagePush(stage[0])); 685 PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */ 686 PetscCall(PetscLogStagePop()); 687 if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE)); 688 PetscCall(DMPlexIsDistributed(*dm, &distributed)); 689 PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial")); 690 if (testHeavy && distributed) { 691 PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL)); 692 PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM)); 693 PetscCall(DMPlexIsDistributed(*serialDM, &distributed)); 694 PetscCheck(!distributed,comm, PETSC_ERR_PLIB, "unable to create a serial DM from file"); 695 } 696 PetscCall(DMGetDimension(*dm, &user->dim)); 697 PetscFunctionReturn(0); 698 } 699 700 static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) 701 { 702 PetscPartitioner part; 703 PortableBoundary boundary = NULL; 704 DM serialDM = NULL; 705 PetscBool cellSimplex = user->cellSimplex; 706 PetscBool useGenerator = user->useGenerator; 707 PetscBool interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE; 708 PetscBool interpSerial = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE; 709 PetscBool interpParallel = user->interpolate == AFTER_DISTRIBUTE ? 
PETSC_TRUE : PETSC_FALSE; 710 PetscBool testHeavy = user->testHeavy; 711 PetscMPIInt rank; 712 713 PetscFunctionBegin; 714 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 715 if (user->filename[0]) { 716 PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM)); 717 } else if (useGenerator) { 718 PetscCall(PetscLogStagePush(stage[0])); 719 PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm)); 720 PetscCall(PetscLogStagePop()); 721 } else { 722 PetscCall(PetscLogStagePush(stage[0])); 723 switch (user->dim) { 724 case 1: 725 PetscCall(CreateMesh_1D(comm, interpCreate, user, dm)); 726 break; 727 case 2: 728 if (cellSimplex) { 729 PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm)); 730 } else { 731 PetscCall(CreateQuad_2D(comm, interpCreate, user, dm)); 732 } 733 break; 734 case 3: 735 if (cellSimplex) { 736 PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm)); 737 } else { 738 PetscCall(CreateHex_3D(comm, interpCreate, user, dm)); 739 } 740 break; 741 default: 742 SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim); 743 } 744 PetscCall(PetscLogStagePop()); 745 } 746 PetscCheck(user->ncoords % user->dim == 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisable by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim); 747 PetscCall(PetscObjectSetName((PetscObject) *dm, "Original Mesh")); 748 PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view")); 749 750 if (interpSerial) { 751 DM idm; 752 753 if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE)); 754 PetscCall(PetscLogStagePush(stage[2])); 755 PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */ 756 PetscCall(PetscLogStagePop()); 757 if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE)); 758 
PetscCall(DMDestroy(dm)); 759 *dm = idm; 760 PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh")); 761 PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view")); 762 } 763 764 /* Set partitioner options */ 765 PetscCall(DMPlexGetPartitioner(*dm, &part)); 766 if (part) { 767 PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE)); 768 PetscCall(PetscPartitionerSetFromOptions(part)); 769 } 770 771 if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm))); 772 if (testHeavy) { 773 PetscBool distributed; 774 775 PetscCall(DMPlexIsDistributed(*dm, &distributed)); 776 if (!serialDM && !distributed) { 777 serialDM = *dm; 778 PetscCall(PetscObjectReference((PetscObject)*dm)); 779 } 780 if (serialDM) { 781 PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary)); 782 } 783 if (boundary) { 784 /* check DM which has been created in parallel and already interpolated */ 785 PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary)); 786 } 787 /* Orient interface because it could be deliberately skipped above. It is idempotent. 
*/ 788 PetscCall(DMPlexOrientInterface_Internal(*dm)); 789 } 790 if (user->distribute) { 791 DM pdm = NULL; 792 793 /* Redistribute mesh over processes using that partitioner */ 794 PetscCall(PetscLogStagePush(stage[1])); 795 PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm)); 796 PetscCall(PetscLogStagePop()); 797 if (pdm) { 798 PetscCall(DMDestroy(dm)); 799 *dm = pdm; 800 PetscCall(PetscObjectSetName((PetscObject) *dm, "Redistributed Mesh")); 801 PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view")); 802 } 803 804 if (interpParallel) { 805 DM idm; 806 807 if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE)); 808 PetscCall(PetscLogStagePush(stage[2])); 809 PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */ 810 PetscCall(PetscLogStagePop()); 811 if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE)); 812 PetscCall(DMDestroy(dm)); 813 *dm = idm; 814 PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh")); 815 PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view")); 816 } 817 } 818 if (testHeavy) { 819 if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary)); 820 /* Orient interface because it could be deliberately skipped above. It is idempotent. 
*/ 821 PetscCall(DMPlexOrientInterface_Internal(*dm)); 822 } 823 824 PetscCall(PetscObjectSetName((PetscObject) *dm, "Parallel Mesh")); 825 PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE)); 826 PetscCall(DMSetFromOptions(*dm)); 827 PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); 828 829 if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm))); 830 PetscCall(DMDestroy(&serialDM)); 831 PetscCall(PortableBoundaryDestroy(&boundary)); 832 PetscFunctionReturn(0); 833 } 834 835 #define ps2d(number) ((double) PetscRealPart(number)) 836 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol) 837 { 838 PetscFunctionBegin; 839 PetscCheck(dim <= 3,PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3"); 840 if (tol >= 1e-3) { 841 switch (dim) { 842 case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.3f)",ps2d(coords[0]))); 843 case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]))); 844 case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2]))); 845 } 846 } else { 847 switch (dim) { 848 case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.6f)",ps2d(coords[0]))); 849 case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]))); 850 case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2]))); 851 } 852 } 853 PetscFunctionReturn(0); 854 } 855 856 static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer) 857 { 858 PetscInt dim, i, npoints; 859 IS pointsIS; 860 const PetscInt *points; 861 const PetscScalar *coords; 862 char coordstr[128]; 863 MPI_Comm comm; 864 PetscMPIInt rank; 865 866 PetscFunctionBegin; 867 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); 868 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 869 PetscCall(DMGetDimension(dm, &dim)); 870 
  /* locate the requested vertices and print each found point, synchronized across ranks */
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i=0; i < npoints; i++) {
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol));
    /* separator between coordinate tuples, printed once (by rank 0) */
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}

/* Expand the points given by -test_expand_points using DMPlexGetConeRecursive(),
   view the per-depth sections and ISs, and verify that
   DMPlexExpandedConesToFaces_Private() inverts each expansion level. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS           is;
  PetscSection *sects;
  IS           *iss;
  PetscInt     d,depth;
  PetscMPIInt  rank;
  PetscViewer  viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  if (user->testExpandPointsEmpty && rank == 0) {
    /* -test_expand_points_empty: rank 0 passes an empty point set */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank));
  for (d=depth-1; d>=0; d--) {
    IS        checkIS;
    PetscBool flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n",d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth-1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d+1], &flg));
      PetscCheck(flg,PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}

/* Inverse of one level of DMPlexGetConeRecursive(): for each point q of section,
   take the join of its expanded cone entries (stored in is) to recover the face.
   The result entry is the joined point, or -1 if the cone contains a negative
   entry, or -2 if the join is empty. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt       n,n1,ncone,numCoveredPoints,o,p,q,start,end;
  const PetscInt *coveredPoints;
  const PetscInt *arr, *cone;
  PetscInt       *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  PetscCheck(n == n1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end-start, &newarr));
  for (q=start; q<end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* a single-point cone joins to the point itself */
      numCoveredPoints = 1;
      p = cone[0];
    } else {
      PetscInt i;
      p = PETSC_MAX_INT;
      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = -1; break;}
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %" PetscInt_FMT,q);
        if (numCoveredPoints) p = coveredPoints[0];
        else p = -2;
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q-start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  /* newarr ownership is transferred to the new IS */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}

/* Collapse an IS of expanded vertices back to faces by applying
   DMPlexExpandedConesToFaces_Private() once per expansion level (depth-1 times). */
static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
{
  PetscInt d;
  IS       is,newis;

  PetscFunctionBegin;
  is = boundary_expanded_is;
  PetscCall(PetscObjectReference((PetscObject)is));
  for (d = 0; d < depth-1; ++d) {
    PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
    PetscCall(ISDestroy(&is));
    is = newis;
  }
  *boundary_is = is;
  PetscFunctionReturn(0);
}

/* on error, just clear the incall flag - viewing here is best-effort */
#define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; }

/* View label with the viewer selected by optionname on the given communicator;
   the static incall flag guards against recursive invocation */
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall,PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg));
  if (flg) {
    CHKERRQI(incall,PetscViewerPushFormat(viewer,format));
    CHKERRQI(incall,DMLabelView(label, viewer));
    CHKERRQI(incall,PetscViewerPopFormat(viewer));
    CHKERRQI(incall,PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
{
  IS tmpis;

  PetscFunctionBegin;
  PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
  /* treat a missing stratum as an empty one */
  if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
  PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
  PetscCall(ISDestroy(&tmpis));
  PetscFunctionReturn(0);
}

/* currently only for simple PetscSection without fields or constraints */
/* Replicate sec0 (significant on rootrank only) so that every rank holds an identical copy */
static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
{
  PetscSection sec;
  PetscInt     chart[2], p;
  PetscInt     *dofarr;
  PetscMPIInt  rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank == rootrank) {
    PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
  }
  /* broadcast the chart, then the per-point dof counts */
  PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(chart[1]-chart[0], &dofarr));
  if (rank == rootrank) {
    for (p = chart[0]; p < chart[1]; p++) {
      PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]));
    }
  }
  PetscCallMPI(MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm));
  PetscCall(PetscSectionCreate(comm, &sec));
  PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
  for (p = chart[0]; p < chart[1]; p++) {
    PetscCall(PetscSectionSetDof(sec, p, dofarr[p-chart[0]]));
  }
  PetscCall(PetscSectionSetUp(sec));
  PetscCall(PetscFree(dofarr));
  *secout = sec;
  PetscFunctionReturn(0);
}

/* Recover the faces of ipdm whose expanded vertices match the coordinates stored in bnd */
static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
{
  IS faces_expanded_is;

  PetscFunctionBegin;
  PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
  PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
  PetscCall(ISDestroy(&faces_expanded_is));
  PetscFunctionReturn(0);
}

/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via
   -dm_plex_interpolate_orient_interfaces option */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions     options = NULL;
  const char       *prefix = NULL;
  const char       opt[] = "-dm_plex_interpolate_orient_interfaces";
  char             prefix_opt[512];
  PetscBool        flg, set;
  static PetscBool wasSetTrue = PETSC_FALSE;

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* build the fully prefixed option name ("-<prefix>dm_plex_interpolate_orient_interfaces") */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* remember an explicit user-set true value so it can be restored on re-enable */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check that the option database now reflects the requested state */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable,PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}

/* get coordinate description of the whole-domain boundary */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary bnd0, bnd;
  MPI_Comm         comm;
  DM               idm;
  DMLabel          label;
  PetscInt         d;
  const char       boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS               boundary_is;
  IS               *boundary_expanded_iss;
  PetscMPIInt      rootrank = 0;
  PetscMPIInt      rank, size;
  PetscInt         value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool        flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg,comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg,PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks hold an empty coordinates vector */
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec        tmp;
    VecScatter sc;
    IS         xis;
    PetscInt   n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n*size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* replicate depth and the per-level sections from the root rank */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d=0; d<bnd->depth; d++) {
    PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
  }

  if (rank == rootrank) {
    PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  }
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}

/* get faces of inter-partition interface */
static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
{
  MPI_Comm   comm;
  DMLabel    label;
  IS         part_boundary_faces_is;
  const char partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
  PetscInt   value = 1;
  DMPlexInterpolatedFlag intp;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* get ipdm partition boundary (partBoundary) */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));

  /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
  PetscCall(ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is));
  PetscCall(ISDestroy(&part_boundary_faces_is));
  PetscFunctionReturn(0);
}

/* compute inter-partition interface including edges and vertices */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel    label;
  PetscInt   value = 1;
  const char interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  /* close the label under the transitive closure, adding edges and vertices of the faces */
  PetscCall(DMPlexLabelComplete(ipdm, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(0);
}

/* leaves of the PointSF = points owned by other ranks */
static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt       n;
  const PetscInt *arr;

  PetscFunctionBegin;
  PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
  PetscFunctionReturn(0);
}

/* roots of the PointSF with nonzero degree = local points referenced by other ranks */
static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt       n;
  const PetscInt *rootdegree;
  PetscInt       *arr;

  PetscFunctionBegin;
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
  PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
  PetscFunctionReturn(0);
}

/* union of inward and outward PointSF points */
static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
{
  IS pointSF_out_is, pointSF_in_is;

  PetscFunctionBegin;
  PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
  PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
  PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
  PetscCall(ISDestroy(&pointSF_out_is));
  PetscCall(ISDestroy(&pointSF_in_is));
  PetscFunctionReturn(0);
}

#define CHKERRMY(ierr) PetscCheck(!ierr,PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")

/* Recursively view each point of pointsIS: vertices with coordinates, higher
   points with their type name, then recurse into their cones */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel        label;
  PetscSection   coordsSection;
  Vec            coordsVec;
  PetscScalar    *coordsScalar;
  PetscInt       coneSize, depth, dim, i, p, npoints;
  const PetscInt *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i=0; i<npoints; i++) {
    p = points[i];
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      /* depth 0 = vertex: print its coordinates */
      PetscInt n, o;
      char     coordstr[128];

      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char entityType[16];

      switch (depth) {
      case 1: PetscCall(PetscStrcpy(entityType, "edge")); break;
      case 2: PetscCall(PetscStrcpy(entityType, "face")); break;
      case 3: PetscCall(PetscStrcpy(entityType, "cell")); break;
      default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in dim < 3, the top-depth entities are the cells */
      if (depth == dim && dim < 3) {
        PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      /* recurse into the cone of p */
      const PetscInt *cone;
      IS             coneIS;

      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}

/* Synchronized (per-rank) view of points with their type; ASCII viewer only */
static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
{
  PetscBool   flg;
  PetscInt    npoints;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
  PetscCall(PetscViewerASCIIPushSynchronized(v));
  PetscCall(ISGetLocalSize(points, &npoints));
  if (npoints) {
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
    PetscCall(ViewPointsWithType_Internal(dm, points, v));
  }
  PetscCall(PetscViewerFlush(v));
  PetscCall(PetscViewerASCIIPopSynchronized(v));
  PetscFunctionReturn(0);
}

/* Check that the PointSF of ipdm covers exactly the points of interface_is;
   on mismatch, view the missing/extra points and raise an error */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF     pointsf;
  IS          pointsf_is;
  PetscBool   flg;
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    PetscInt N=0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N,comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&flg,1,MPIU_BOOL,MPI_LAND,comm));
  if (!flg) {
    IS          pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}

/* remove faces & edges from label, leave just vertices */
static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
{
  PetscInt vStart, vEnd;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
  PetscCall(ISGeneralFilter(points, vStart, vEnd));
  PetscFunctionReturn(0);
}

/*
  DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.

  Collective

  Input Parameters:
. dm - The DMPlex object

  Notes:
  The input DMPlex must be serial (one partition has all points, the other partitions have no points).
  This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
  This is mainly intended for debugging/testing purposes.

  Algorithm:
  1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
  2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
  3. the mesh is distributed or loaded in parallel
  4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
  5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
  6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
  7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error

  Level: developer

.seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
*/
static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
{
  DM       ipdm=NULL;
  IS       boundary_faces_is, interface_faces_is, interface_is;
  DMPlexInterpolatedFlag intp;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    ipdm = dm;
  } else {
    /* create temporary interpolated DM if input DM is not interpolated */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
    PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
  }
  PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));

  /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
  PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
  /* get inter-partition interface faces (interface_faces_is)*/
  PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
  /* compute inter-partition interface including edges and vertices (interface_is) */
  PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
  /* destroy immediate ISs */
  PetscCall(ISDestroy(&boundary_faces_is));
  PetscCall(ISDestroy(&interface_faces_is));

  /* for uninterpolated case, keep just vertices in interface */
  if (!intp) {
    PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
    /* the temporary interpolated DM is no longer needed */
    PetscCall(DMDestroy(&ipdm));
  }

  /* compare PointSF with the boundary reconstructed from coordinates */
  PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
  PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
  PetscCall(ISDestroy(&interface_is));
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM     dm;
  AppCtx user;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscLogStageRegister("create",&stage[0]));
  PetscCall(PetscLogStageRegister("distribute",&stage[1]));
  PetscCall(PetscLogStageRegister("interpolate",&stage[2]));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  if (user.nPointsToExpand) {
    PetscCall(TestExpandPoints(dm, &user));
  }
  if (user.ncoords) {
    Vec coords;

    /* wrap the user-given coordinates (no copy) and view the matching vertices */
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
    PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecDestroy(&coords));
  }
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  testset:
    nsize: 2
    args: -dm_view ascii::ascii_info_detail
    args: -dm_plex_check_all
    test:
      suffix: 1_tri_dist0
      args: -distribute 0 -interpolate {{none create}separate output}
    test:
      suffix: 1_tri_dist1
      args: -distribute 1 -interpolate {{none create after_distribute}separate output}
    test:
      suffix: 1_quad_dist0
      args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
    test:
      suffix: 1_quad_dist1
      args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
    test:
      suffix: 1_1d_dist1
      args: -dim 1 -distribute 1
1566 1567 testset: 1568 nsize: 3 1569 args: -testnum 1 -interpolate create 1570 args: -dm_plex_check_all 1571 test: 1572 suffix: 2 1573 args: -dm_view ascii::ascii_info_detail 1574 test: 1575 suffix: 2a 1576 args: -dm_plex_check_cones_conform_on_interfaces_verbose 1577 test: 1578 suffix: 2b 1579 args: -test_expand_points 0,1,2,5,6 1580 test: 1581 suffix: 2c 1582 args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty 1583 1584 testset: 1585 # the same as 1% for 3D 1586 nsize: 2 1587 args: -dim 3 -dm_view ascii::ascii_info_detail 1588 args: -dm_plex_check_all 1589 test: 1590 suffix: 4_tet_dist0 1591 args: -distribute 0 -interpolate {{none create}separate output} 1592 test: 1593 suffix: 4_tet_dist1 1594 args: -distribute 1 -interpolate {{none create after_distribute}separate output} 1595 test: 1596 suffix: 4_hex_dist0 1597 args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output} 1598 test: 1599 suffix: 4_hex_dist1 1600 args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output} 1601 1602 test: 1603 # the same as 4_tet_dist0 but test different initial orientations 1604 suffix: 4_tet_test_orient 1605 nsize: 2 1606 args: -dim 3 -distribute 0 1607 args: -dm_plex_check_all 1608 args: -rotate_interface_0 {{0 1 2 11 12 13}} 1609 args: -rotate_interface_1 {{0 1 2 11 12 13}} 1610 1611 testset: 1612 requires: exodusii 1613 args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo 1614 args: -dm_view ascii::ascii_info_detail 1615 args: -dm_plex_check_all 1616 args: -custom_view 1617 test: 1618 suffix: 5_seq 1619 nsize: 1 1620 args: -distribute 0 -interpolate {{none create}separate output} 1621 test: 1622 # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared 1623 suffix: 5_dist0 1624 nsize: 2 1625 args: -distribute 0 -interpolate {{none create}separate output} -dm_view 1626 test: 1627 suffix: 5_dist1 1628 nsize: 2 1629 args: 
-distribute 1 -interpolate {{none create after_distribute}separate output} 1630 1631 testset: 1632 nsize: {{1 2 4}} 1633 args: -use_generator 1634 args: -dm_plex_check_all 1635 args: -distribute -interpolate none 1636 test: 1637 suffix: 6_tri 1638 requires: triangle 1639 args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle 1640 test: 1641 suffix: 6_quad 1642 args: -faces {{2,2 1,3 7,4}} -cell_simplex 0 1643 test: 1644 suffix: 6_tet 1645 requires: ctetgen 1646 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen 1647 test: 1648 suffix: 6_hex 1649 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0 1650 testset: 1651 nsize: {{1 2 4}} 1652 args: -use_generator 1653 args: -dm_plex_check_all 1654 args: -distribute -interpolate create 1655 test: 1656 suffix: 6_int_tri 1657 requires: triangle 1658 args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle 1659 test: 1660 suffix: 6_int_quad 1661 args: -faces {{2,2 1,3 7,4}} -cell_simplex 0 1662 test: 1663 suffix: 6_int_tet 1664 requires: ctetgen 1665 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen 1666 test: 1667 suffix: 6_int_hex 1668 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0 1669 testset: 1670 nsize: {{2 4}} 1671 args: -use_generator 1672 args: -dm_plex_check_all 1673 args: -distribute -interpolate after_distribute 1674 test: 1675 suffix: 6_parint_tri 1676 requires: triangle 1677 args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle 1678 test: 1679 suffix: 6_parint_quad 1680 args: -faces {{2,2 1,3 7,4}} -cell_simplex 0 1681 test: 1682 suffix: 6_parint_tet 1683 requires: ctetgen 1684 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen 1685 test: 1686 suffix: 6_parint_hex 1687 args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0 1688 1689 testset: # 7 EXODUS 1690 requires: exodusii 1691 args: -dm_plex_check_all 1692 args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo 1693 args: 
-distribute 1694 test: # seq load, simple partitioner 1695 suffix: 7_exo 1696 nsize: {{1 2 4 5}} 1697 args: -interpolate none 1698 test: # seq load, seq interpolation, simple partitioner 1699 suffix: 7_exo_int_simple 1700 nsize: {{1 2 4 5}} 1701 args: -interpolate create 1702 test: # seq load, seq interpolation, metis partitioner 1703 suffix: 7_exo_int_metis 1704 requires: parmetis 1705 nsize: {{2 4 5}} 1706 args: -interpolate create 1707 args: -petscpartitioner_type parmetis 1708 test: # seq load, simple partitioner, par interpolation 1709 suffix: 7_exo_simple_int 1710 nsize: {{2 4 5}} 1711 args: -interpolate after_distribute 1712 test: # seq load, metis partitioner, par interpolation 1713 suffix: 7_exo_metis_int 1714 requires: parmetis 1715 nsize: {{2 4 5}} 1716 args: -interpolate after_distribute 1717 args: -petscpartitioner_type parmetis 1718 1719 testset: # 7 HDF5 SEQUENTIAL LOAD 1720 requires: hdf5 !complex 1721 args: -dm_plex_check_all 1722 args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf 1723 args: -dm_plex_hdf5_force_sequential 1724 args: -distribute 1725 test: # seq load, simple partitioner 1726 suffix: 7_seq_hdf5_simple 1727 nsize: {{1 2 4 5}} 1728 args: -interpolate none 1729 test: # seq load, seq interpolation, simple partitioner 1730 suffix: 7_seq_hdf5_int_simple 1731 nsize: {{1 2 4 5}} 1732 args: -interpolate after_create 1733 test: # seq load, seq interpolation, metis partitioner 1734 nsize: {{2 4 5}} 1735 suffix: 7_seq_hdf5_int_metis 1736 requires: parmetis 1737 args: -interpolate after_create 1738 args: -petscpartitioner_type parmetis 1739 test: # seq load, simple partitioner, par interpolation 1740 suffix: 7_seq_hdf5_simple_int 1741 nsize: {{2 4 5}} 1742 args: -interpolate after_distribute 1743 test: # seq load, metis partitioner, par interpolation 1744 nsize: {{2 4 5}} 1745 suffix: 7_seq_hdf5_metis_int 1746 requires: parmetis 1747 args: -interpolate after_distribute 1748 args: 
-petscpartitioner_type parmetis 1749 1750 testset: # 7 HDF5 PARALLEL LOAD 1751 requires: hdf5 !complex 1752 nsize: {{2 4 5}} 1753 args: -dm_plex_check_all 1754 args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf 1755 test: # par load 1756 suffix: 7_par_hdf5 1757 args: -interpolate none 1758 test: # par load, par interpolation 1759 suffix: 7_par_hdf5_int 1760 args: -interpolate after_create 1761 test: # par load, parmetis repartitioner 1762 TODO: Parallel partitioning of uninterpolated meshes not supported 1763 suffix: 7_par_hdf5_parmetis 1764 requires: parmetis 1765 args: -distribute -petscpartitioner_type parmetis 1766 args: -interpolate none 1767 test: # par load, par interpolation, parmetis repartitioner 1768 suffix: 7_par_hdf5_int_parmetis 1769 requires: parmetis 1770 args: -distribute -petscpartitioner_type parmetis 1771 args: -interpolate after_create 1772 test: # par load, parmetis partitioner, par interpolation 1773 TODO: Parallel partitioning of uninterpolated meshes not supported 1774 suffix: 7_par_hdf5_parmetis_int 1775 requires: parmetis 1776 args: -distribute -petscpartitioner_type parmetis 1777 args: -interpolate after_distribute 1778 1779 test: 1780 suffix: 7_hdf5_hierarch 1781 requires: hdf5 ptscotch !complex 1782 nsize: {{2 3 4}separate output} 1783 args: -distribute 1784 args: -interpolate after_create 1785 args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info 1786 args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2 1787 args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch 1788 1789 test: 1790 suffix: 8 1791 requires: hdf5 !complex 1792 nsize: 4 1793 args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf 1794 args: -distribute 0 -interpolate after_create 1795 args: -view_vertices_from_coords 
0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3 1796 args: -dm_plex_check_all 1797 args: -custom_view 1798 1799 testset: # 9 HDF5 SEQUENTIAL LOAD 1800 requires: hdf5 !complex datafilespath 1801 args: -dm_plex_check_all 1802 args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates 1803 args: -dm_plex_hdf5_force_sequential 1804 args: -distribute 1805 test: # seq load, simple partitioner 1806 suffix: 9_seq_hdf5_simple 1807 nsize: {{1 2 4 5}} 1808 args: -interpolate none 1809 test: # seq load, seq interpolation, simple partitioner 1810 suffix: 9_seq_hdf5_int_simple 1811 nsize: {{1 2 4 5}} 1812 args: -interpolate after_create 1813 test: # seq load, seq interpolation, metis partitioner 1814 nsize: {{2 4 5}} 1815 suffix: 9_seq_hdf5_int_metis 1816 requires: parmetis 1817 args: -interpolate after_create 1818 args: -petscpartitioner_type parmetis 1819 test: # seq load, simple partitioner, par interpolation 1820 suffix: 9_seq_hdf5_simple_int 1821 nsize: {{2 4 5}} 1822 args: -interpolate after_distribute 1823 test: # seq load, simple partitioner, par interpolation 1824 # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy(). 1825 # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken. 1826 # We can then provide an intentionally broken mesh instead. 1827 TODO: This test is broken because PointSF is fixed. 
1828 suffix: 9_seq_hdf5_simple_int_err 1829 nsize: 4 1830 args: -interpolate after_distribute 1831 filter: sed -e "/PETSC ERROR/,$$d" 1832 test: # seq load, metis partitioner, par interpolation 1833 nsize: {{2 4 5}} 1834 suffix: 9_seq_hdf5_metis_int 1835 requires: parmetis 1836 args: -interpolate after_distribute 1837 args: -petscpartitioner_type parmetis 1838 1839 testset: # 9 HDF5 PARALLEL LOAD 1840 requires: hdf5 !complex datafilespath 1841 nsize: {{2 4 5}} 1842 args: -dm_plex_check_all 1843 args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates 1844 test: # par load 1845 suffix: 9_par_hdf5 1846 args: -interpolate none 1847 test: # par load, par interpolation 1848 suffix: 9_par_hdf5_int 1849 args: -interpolate after_create 1850 test: # par load, parmetis repartitioner 1851 TODO: Parallel partitioning of uninterpolated meshes not supported 1852 suffix: 9_par_hdf5_parmetis 1853 requires: parmetis 1854 args: -distribute -petscpartitioner_type parmetis 1855 args: -interpolate none 1856 test: # par load, par interpolation, parmetis repartitioner 1857 suffix: 9_par_hdf5_int_parmetis 1858 requires: parmetis 1859 args: -distribute -petscpartitioner_type parmetis 1860 args: -interpolate after_create 1861 test: # par load, parmetis partitioner, par interpolation 1862 TODO: Parallel partitioning of uninterpolated meshes not supported 1863 suffix: 9_par_hdf5_parmetis_int 1864 requires: parmetis 1865 args: -distribute -petscpartitioner_type parmetis 1866 args: -interpolate after_distribute 1867 1868 testset: # 10 HDF5 PARALLEL LOAD 1869 requires: hdf5 !complex datafilespath 1870 nsize: {{2 4 7}} 1871 args: -dm_plex_check_all 1872 args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom 1873 test: # par load, par interpolation 1874 suffix: 
10_par_hdf5_int 1875 args: -interpolate after_create 1876 TEST*/ 1877