xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision d5b43468fb8780a8feea140ccd6fa3e6a50411cc)
1 static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
39     mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
/* When (if ever) topological interpolation of the mesh is performed */
typedef enum {
  NONE,             /* never interpolate */
  CREATE,           /* interpolate during mesh creation */
  AFTER_CREATE,     /* interpolate the (possibly serial) mesh right after creation */
  AFTER_DISTRIBUTE  /* interpolate after DMPlexDistribute(); requires -distribute */
} InterpType;
195 
/* Application context: all options parsed by ProcessOptions() */
typedef struct {
  PetscInt    debug;        /* The debugging level */
  PetscInt    testNum;      /* Indicates the mesh to create */
  PetscInt    dim;          /* The topological mesh dimension */
  PetscBool   cellSimplex;  /* Use simplices or hexes */
  PetscBool   distribute;   /* Distribute the mesh */
  InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool   useGenerator; /* Construct mesh with a mesh generator */
  PetscBool   testOrientIF; /* Test for different original interface orientations */
  PetscBool   testHeavy;    /* Run the heavy PointSF test */
  PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt    faces[3];     /* Number of faces per dimension for generator */
  PetscScalar coords[128];  /* Coordinates given to -view_vertices_from_coords */
  PetscReal   coordsTol;    /* Tolerance for matching the coordinates above (PETSC_DEFAULT unless set) */
  PetscInt    ncoords;      /* Number of valid entries in coords[] */
  PetscInt    pointsToExpand[128]; /* DAG points given to -test_expand_points */
  PetscInt    nPointsToExpand;     /* Number of valid entries in pointsToExpand[] */
  PetscBool   testExpandPointsEmpty; /* If true, rank 0 passes an empty array to the expansion test */
  char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
217 
/* Rank-portable description of a mesh boundary, produced by
   DMPlexGetExpandedBoundary_Private() and consumed by DMPlexCheckPointSFHeavy().
   (Exact layout semantics defined by those helpers; see their implementations.) */
struct _n_PortableBoundary {
  Vec           coordinates; /* coordinates of the boundary points */
  PetscInt      depth;       /* number of entries in sections[] */
  PetscSection *sections;    /* one section per stratum depth, 0 <= d < depth */
};
typedef struct _n_PortableBoundary *PortableBoundary;
224 
225 #if defined(PETSC_USE_LOG)
226 static PetscLogStage stage[3];
227 #endif
228 
229 static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
230 static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
231 static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
232 static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
233 
234 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
235 {
236   PetscInt d;
237 
238   PetscFunctionBegin;
239   if (!*bnd) PetscFunctionReturn(0);
240   PetscCall(VecDestroy(&(*bnd)->coordinates));
241   for (d = 0; d < (*bnd)->depth; d++) PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
242   PetscCall(PetscFree((*bnd)->sections));
243   PetscCall(PetscFree(*bnd));
244   PetscFunctionReturn(0);
245 }
246 
/* Parse command-line options into *options. Collective on comm.
   Sets defaults first, then reads options, then enforces cross-option
   consistency: -interpolate after_distribute needs -distribute,
   -faces length must agree with -dim, and the two -rotate_interface_*
   options must be given together (they imply -interpolate create -distribute 0). */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
  PetscInt    interp         = NONE, dim;
  PetscBool   flg1, flg2;

  PetscFunctionBegin;
  /* Defaults (note testHeavy defaults to true, so the PointSF check runs unless disabled) */
  options->debug                 = 0;
  options->testNum               = 0;
  options->dim                   = 2;
  options->cellSimplex           = PETSC_TRUE;
  options->distribute            = PETSC_FALSE;
  options->interpolate           = NONE;
  options->useGenerator          = PETSC_FALSE;
  options->testOrientIF          = PETSC_FALSE;
  options->testHeavy             = PETSC_TRUE;
  options->customView            = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]               = 0;
  options->ornt[1]               = 0;
  options->faces[0]              = 2;
  options->faces[1]              = 2;
  options->faces[2]              = 2;
  options->filename[0]           = '\0';
  options->coordsTol             = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType)interp;
  PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords/nPointsToExpand are in-out: set to array capacity before the call, length of user input after */
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
  /* dim here is the capacity of -faces; if -faces was given, its length becomes the dimension */
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
  PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i = 0; i < 2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
    }
    /* The orientation test only works on the hard-wired serial 3D simplex mesh;
       force the settings it requires, overriding anything given on the command line */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
316 
317 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
318 {
319   PetscInt    testNum = user->testNum;
320   PetscMPIInt rank, size;
321   PetscInt    numCorners = 2, i;
322   PetscInt    numCells, numVertices, network;
323   PetscInt   *cells;
324   PetscReal  *coords;
325 
326   PetscFunctionBegin;
327   PetscCallMPI(MPI_Comm_rank(comm, &rank));
328   PetscCallMPI(MPI_Comm_size(comm, &size));
329   PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);
330 
331   numCells = 3;
332   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
333   PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must >=3", numCells);
334 
335   if (size == 1) {
336     numVertices = numCells + 1;
337     PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
338     for (i = 0; i < numCells; i++) {
339       cells[2 * i]      = i;
340       cells[2 * i + 1]  = i + 1;
341       coords[2 * i]     = i;
342       coords[2 * i + 1] = i + 1;
343     }
344 
345     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
346     PetscCall(PetscFree2(cells, coords));
347     PetscFunctionReturn(0);
348   }
349 
350   network = 0;
351   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
352   if (network == 0) {
353     switch (rank) {
354     case 0: {
355       numCells    = 2;
356       numVertices = numCells;
357       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
358       cells[0]  = 0;
359       cells[1]  = 1;
360       cells[2]  = 1;
361       cells[3]  = 2;
362       coords[0] = 0.;
363       coords[1] = 1.;
364       coords[2] = 1.;
365       coords[3] = 2.;
366     } break;
367     case 1: {
368       numCells -= 2;
369       numVertices = numCells + 1;
370       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
371       for (i = 0; i < numCells; i++) {
372         cells[2 * i]      = 2 + i;
373         cells[2 * i + 1]  = 2 + i + 1;
374         coords[2 * i]     = 2 + i;
375         coords[2 * i + 1] = 2 + i + 1;
376       }
377     } break;
378     default:
379       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
380     }
381   } else { /* network_case = 1 */
382     /* ----------------------- */
383     switch (rank) {
384     case 0: {
385       numCells    = 2;
386       numVertices = 3;
387       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
388       cells[0] = 0;
389       cells[1] = 3;
390       cells[2] = 3;
391       cells[3] = 1;
392     } break;
393     case 1: {
394       numCells    = 1;
395       numVertices = 1;
396       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
397       cells[0] = 3;
398       cells[1] = 2;
399     } break;
400     default:
401       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
402     }
403   }
404   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
405   PetscCall(PetscFree2(cells, coords));
406   PetscFunctionReturn(0);
407 }
408 
/* Build the distributed 2D triangle test meshes drawn in the header comment.
   user->testNum selects the mesh (0: two triangles on 2 ranks; 1 and 2: four
   triangles on 3 ranks with different vertex numberings); each rank supplies
   its own cell/vertex lists to DMPlexCreateFromCellListParallelPetsc(). */
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    /* Two triangles sharing a face, one cell per rank */
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 3, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    /* Four triangles on 3 ranks (ranks 0 and 1 own one cell, rank 2 owns two) */
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 2, 3};
      PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      /* NOTE(review): markerPoints holds 5 (point,value) pairs but the loop below
         labels only the first 3 — confirm whether the last two are intentional dead data */
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    /* Same four-triangle mesh as test 1 but with a permuted vertex numbering */
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 2, 0};
      PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 0, 3};
      PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
516 
/* Build the distributed 3D tetrahedral test mesh (two tets sharing a face,
   one per rank) drawn in the header comment. If user->testOrientIF is set,
   additionally rotate the shared interface face on each rank by
   user->ornt[rank] and run DMPlexOrientInterface_Internal() to verify that
   the interface orientation is repaired. */
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
      const PetscInt cells[4]        = {0, 2, 1, 3};
      PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]        = {1, 2, 4, 3};
      PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    /* ifp[rank] is presumably the DAG point of the shared interface face in the
       interpolated numbering on each rank — TODO confirm against the mesh layout */
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    /* NOTE(review): same viewer option as above, so this shows the mesh *after*
       the rotation under the "before_orientation" name — confirm intended */
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
568 
/* Build the distributed 2D quadrilateral test mesh (two quads sharing a face,
   one cell per rank) drawn in the header comment. Only testNum 0 exists;
   the quad+triangle mesh is still a TODO in the header comment. */
static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {0, 1, 2, 3};
      PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {1, 4, 5, 2};
      PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
608 
/* Build the distributed 3D hexahedral test mesh (two hexes sharing a face,
   one cell per rank) drawn in the header comment. */
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
      /* NOTE(review): 8 (point,value) pairs are declared but the loop below labels
         only the first 4 — confirm whether p < 4 is intentional or should be p < 8 */
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
648 
649 static PetscErrorCode CustomView(DM dm, PetscViewer v)
650 {
651   DMPlexInterpolatedFlag interpolated;
652   PetscBool              distributed;
653 
654   PetscFunctionBegin;
655   PetscCall(DMPlexIsDistributed(dm, &distributed));
656   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
657   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
658   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
659   PetscFunctionReturn(0);
660 }
661 
/* Load the mesh from user->filename into *dm and update user->dim from it.
   When the heavy PointSF test is on and the loader would produce a distributed
   mesh, also load a guaranteed-serial copy into *serialDM (NULL otherwise) by
   forcing -dm_plex_hdf5_force_sequential — note this mutates the global options
   database. With testHeavy and interpolate-on-create, interface orientation is
   deliberately skipped during the load so DMPlexCheckPointSFHeavy() can catch
   PointSF issues itself. */
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char *filename     = user->filename;
  PetscBool   testHeavy    = user->testHeavy;
  PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool   distributed  = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    /* force a second, sequential load so the heavy test has a serial reference mesh */
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
687 
/* Build the test mesh on comm according to user options:
   - create it from a file (CreateMeshFromFile), from the box generator, or from one of the
     hard-coded test meshes (1D network / 2D simplex or quad / 3D simplex or hex);
   - optionally interpolate before distribution (AFTER_CREATE) or after it (AFTER_DISTRIBUTE);
   - when testHeavy is set, run DMPlexCheckPointSFHeavy() before and after distribution using a
     partition-independent boundary description (PortableBoundary) obtained from a serial DM. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscPartitioner part;
  PortableBoundary boundary       = NULL;
  DM               serialDM       = NULL;
  PetscBool        cellSimplex    = user->cellSimplex;
  PetscBool        useGenerator   = user->useGenerator;
  PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        testHeavy      = user->testHeavy;
  PetscMPIInt      rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Creation phase (timed in stage[0]); priority: file > generator > hard-coded meshes */
  if (user->filename[0]) {
    PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
  } else if (useGenerator) {
    PetscCall(PetscLogStagePush(stage[0]));
    PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
    PetscCall(PetscLogStagePop());
  } else {
    PetscCall(PetscLogStagePush(stage[0]));
    switch (user->dim) {
    case 1:
      PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
      break;
    case 2:
      if (cellSimplex) {
        PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
      }
      break;
    case 3:
      if (cellSimplex) {
        PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
      }
      break;
    default:
      SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
    }
    PetscCall(PetscLogStagePop());
  }
  /* user->coords (used elsewhere for vertex lookups) must contain whole points of dimension user->dim */
  PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisable by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
  PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

  /* Interpolate before distribution (AFTER_CREATE); stage[2] times interpolation */
  if (interpSerial) {
    DM idm;

    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
    PetscCall(PetscLogStagePush(stage[2]));
    PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscCall(PetscLogStagePop());
    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
    PetscCall(DMDestroy(dm));
    *dm = idm;
    PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
  }

  /* Set partitioner options */
  PetscCall(DMPlexGetPartitioner(*dm, &part));
  if (part) {
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
  }

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  /* Heavy PointSF check before distribution; needs a serial DM to build the PortableBoundary */
  if (testHeavy) {
    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(*dm, &distributed));
    if (!serialDM && !distributed) {
      serialDM = *dm;
      PetscCall(PetscObjectReference((PetscObject)*dm));
    }
    if (serialDM) PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }
  if (user->distribute) {
    DM pdm = NULL;

    /* Redistribute mesh over processes using that partitioner (timed in stage[1]) */
    PetscCall(PetscLogStagePush(stage[1]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
    PetscCall(PetscLogStagePop());
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm = pdm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
    }

    /* Interpolate after distribution (AFTER_DISTRIBUTE) */
    if (interpParallel) {
      DM idm;

      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
      PetscCall(PetscLogStagePush(stage[2]));
      PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscCall(PetscLogStagePop());
      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
      PetscCall(DMDestroy(dm));
      *dm = idm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
    }
  }
  /* Heavy PointSF check again on the (possibly distributed and interpolated) mesh */
  if (testHeavy) {
    if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }

  PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(DMDestroy(&serialDM));
  PetscCall(PortableBoundaryDestroy(&boundary));
  PetscFunctionReturn(0);
}
820 
821 #define ps2d(number) ((double)PetscRealPart(number))
822 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
823 {
824   PetscFunctionBegin;
825   PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
826   if (tol >= 1e-3) {
827     switch (dim) {
828     case 1:
829       PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0])));
830     case 2:
831       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1])));
832     case 3:
833       PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
834     }
835   } else {
836     switch (dim) {
837     case 1:
838       PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0])));
839     case 2:
840       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1])));
841     case 3:
842       PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
843     }
844   }
845   PetscFunctionReturn(0);
846 }
847 
/* For each coordinate tuple in coordsVec, find the matching local vertex (within tol) with
   DMPlexFindVertices() and print "[rank] (coords) --> points[i] = p" on the synchronized
   ASCII viewer; points[i] is -1 when no local vertex matches. */
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt           dim, i, npoints;
  IS                 pointsIS;
  const PetscInt    *points;
  const PetscScalar *coords;
  char               coordstr[128];
  MPI_Comm           comm;
  PetscMPIInt        rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i = 0; i < npoints; i++) {
    /* coords stores npoints tuples of dim values each */
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
    /* separator between points, emitted once (from rank 0) so output stays rank-ordered */
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    /* flush inside the loop so all ranks' lines for point i appear together */
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}
879 
/* Test DMPlexGetConeRecursive(): expand user->pointsToExpand into cone closures per depth
   level, view each level's section and IS per rank, and verify the inverse operation
   DMPlexExpandedConesToFaces_Private() reconstructs the next level up. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS            is;
  PetscSection *sects;
  IS           *iss;
  PetscInt      d, depth;
  PetscMPIInt   rank;
  PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  /* optionally exercise the empty-input path on rank 0 only */
  if (user->testExpandPointsEmpty && rank == 0) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  /* per-rank sequential subviewer so each rank's output is grouped */
  PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
  /* iss[depth-1] are the original points; iterate downward toward vertices (d == 0) */
  for (d = depth - 1; d >= 0; d--) {
    IS        checkIS;
    PetscBool flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth - 1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
      PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}
920 
/* Inverse of one DMPlexGetConeRecursive() level: for each section point q, take the cone
   points stored in is at q's offset and compute their join (the face they bound).
   Resulting entries in *newis:
     p >= 0 : the joined face point;
     -1     : some cone entry was already negative (propagated invalid marker);
     -2     : the cone points have no (unique) join. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
  const PetscInt *coveredPoints;
  const PetscInt *arr, *cone;
  PetscInt       *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  /* the IS must be exactly the expanded storage described by section */
  PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end - start, &newarr));
  for (q = start; q < end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* single-point cone: the "join" is the point itself */
      numCoveredPoints = 1;
      p                = cone[0];
    } else {
      PetscInt i;
      /* PETSC_MAX_INT is a provisional positive sentinel; any negative cone entry flips p to -1 */
      p = PETSC_MAX_INT;
      for (i = 0; i < ncone; i++)
        if (cone[i] < 0) {
          p = -1;
          break;
        }
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %" PetscInt_FMT, q);
        if (numCoveredPoints) p = coveredPoints[0];
        else p = -2; /* no join found */
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q - start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  /* newarr ownership passes to the IS */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
964 
965 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
966 {
967   PetscInt d;
968   IS       is, newis;
969 
970   PetscFunctionBegin;
971   is = boundary_expanded_is;
972   PetscCall(PetscObjectReference((PetscObject)is));
973   for (d = 0; d < depth - 1; ++d) {
974     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
975     PetscCall(ISDestroy(&is));
976     is = newis;
977   }
978   *boundary_is = is;
979   PetscFunctionReturn(0);
980 }
981 
/* On error, clear the reentrancy flag instead of propagating — errors here are deliberately
   swallowed so a failed view cannot abort the surrounding test. */
#define CHKERRQI(incall, ierr) \
  if (ierr) incall = PETSC_FALSE;

/* Like DMLabelViewFromOptions() but with the viewer resolved on an explicit comm.
   Guarded by a static `incall` flag against recursive invocation. */
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall, PetscOptionsGetViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
  if (flg) {
    CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
    CHKERRQI(incall, DMLabelView(label, viewer));
    CHKERRQI(incall, PetscViewerPopFormat(viewer));
    CHKERRQI(incall, PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1005 
1006 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1007 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1008 {
1009   IS tmpis;
1010 
1011   PetscFunctionBegin;
1012   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1013   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1014   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1015   PetscCall(ISDestroy(&tmpis));
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /* currently only for simple PetscSection without fields or constraints */
1020 static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1021 {
1022   PetscSection sec;
1023   PetscInt     chart[2], p;
1024   PetscInt    *dofarr;
1025   PetscMPIInt  rank;
1026 
1027   PetscFunctionBegin;
1028   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1029   if (rank == rootrank) PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1030   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1031   PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
1032   if (rank == rootrank) {
1033     for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]));
1034   }
1035   PetscCallMPI(MPI_Bcast(dofarr, chart[1] - chart[0], MPIU_INT, rootrank, comm));
1036   PetscCall(PetscSectionCreate(comm, &sec));
1037   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1038   for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]]));
1039   PetscCall(PetscSectionSetUp(sec));
1040   PetscCall(PetscFree(dofarr));
1041   *secout = sec;
1042   PetscFunctionReturn(0);
1043 }
1044 
1045 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1046 {
1047   IS faces_expanded_is;
1048 
1049   PetscFunctionBegin;
1050   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1051   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1052   PetscCall(ISDestroy(&faces_expanded_is));
1053   PetscFunctionReturn(0);
1054 }
1055 
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
/* enable == PETSC_FALSE forces the option to 0; enable == PETSC_TRUE restores the user's
   original setting (remembered in the static wasSetTrue across a disable/enable pair). */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions     options = NULL;
  const char      *prefix  = NULL;
  const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
  char             prefix_opt[512];
  PetscBool        flg, set;
  static PetscBool wasSetTrue = PETSC_FALSE;

  PetscFunctionBegin;
  /* dm may be NULL: then the global options database with no prefix is used */
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* build "-<prefix>dm_plex_interpolate_orient_interfaces" (skip opt's leading '-') */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* remember if the user had explicitly set it true, so it can be restored later */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check: the database now reflects the requested state */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1093 
/* get coordinate description of the whole-domain boundary */
/* Build a PortableBoundary from a serial DM: mark the whole-domain boundary faces,
   expand them recursively to vertices, convert the vertices to coordinates, and replicate
   coordinates and per-depth sections from the root rank to all ranks of the DM's comm.
   The result is independent of partitioning and point numbering. */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                    *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    /* NOTE(review): coordinates are taken from dm while cones come from idm — presumably
       vertex numbering coincides between dm and its interpolation; verify if this changes */
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks hold an empty sequential coordinates vector */
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec        tmp;
    VecScatter sc;
    IS         xis;
    PetscInt   n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n * size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* replicate depth and the per-depth sections from the root rank */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d = 0; d < bnd->depth; d++) PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));

  if (rank == rootrank) PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1195 
1196 /* get faces of inter-partition interface */
1197 static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1198 {
1199   MPI_Comm               comm;
1200   DMLabel                label;
1201   IS                     part_boundary_faces_is;
1202   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1203   PetscInt               value              = 1;
1204   DMPlexInterpolatedFlag intp;
1205 
1206   PetscFunctionBegin;
1207   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1208   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1209   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1210 
1211   /* get ipdm partition boundary (partBoundary) */
1212   PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1213   PetscCall(DMAddLabel(ipdm, label));
1214   PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1215   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1216   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1217   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1218   PetscCall(DMLabelDestroy(&label));
1219 
1220   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1221   PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
1222   PetscCall(ISDestroy(&part_boundary_faces_is));
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 /* compute inter-partition interface including edges and vertices */
1227 static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
1228 {
1229   DMLabel                label;
1230   PetscInt               value           = 1;
1231   const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
1232   DMPlexInterpolatedFlag intp;
1233   MPI_Comm               comm;
1234 
1235   PetscFunctionBegin;
1236   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1237   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1238   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1239 
1240   PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
1241   PetscCall(DMAddLabel(ipdm, label));
1242   PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
1243   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
1244   PetscCall(DMPlexLabelComplete(ipdm, label));
1245   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
1246   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
1247   PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
1248   PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
1249   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1250   PetscCall(DMLabelDestroy(&label));
1251   PetscFunctionReturn(0);
1252 }
1253 
1254 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1255 {
1256   PetscInt        n;
1257   const PetscInt *arr;
1258 
1259   PetscFunctionBegin;
1260   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1261   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1266 {
1267   PetscInt        n;
1268   const PetscInt *rootdegree;
1269   PetscInt       *arr;
1270 
1271   PetscFunctionBegin;
1272   PetscCall(PetscSFSetUp(sf));
1273   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1274   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1275   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1276   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1281 {
1282   IS pointSF_out_is, pointSF_in_is;
1283 
1284   PetscFunctionBegin;
1285   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1286   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1287   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1288   PetscCall(ISDestroy(&pointSF_out_is));
1289   PetscCall(ISDestroy(&pointSF_in_is));
1290   PetscFunctionReturn(0);
1291 }
1292 
/* Abort with a generic message if an error occurs while trying to report PointSF details */
#define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")

/* Print each point of pointsIS with its type (vertex/edge/face/cell), vertex coordinates
   where applicable, and recursively its cone, on a synchronized ASCII viewer. */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar    *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  /* one indentation level per recursion depth */
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i = 0; i < npoints; i++) {
    p = points[i];
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      /* depth 0 point: a vertex — print its coordinates */
      PetscInt n, o;
      char     coordstr[128];

      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char entityType[16];

      switch (depth) {
      case 1:
        PetscCall(PetscStrcpy(entityType, "edge"));
        break;
      case 2:
        PetscCall(PetscStrcpy(entityType, "face"));
        break;
      case 3:
        PetscCall(PetscStrcpy(entityType, "cell"));
        break;
      default:
        SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in 1D/2D meshes the top-depth entities are the cells */
      if (depth == dim && dim < 3) PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      /* recurse into the point's cone with an extra indentation level */
      const PetscInt *cone;
      IS              coneIS;

      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1359 
1360 static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1361 {
1362   PetscBool   flg;
1363   PetscInt    npoints;
1364   PetscMPIInt rank;
1365 
1366   PetscFunctionBegin;
1367   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1368   PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1369   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1370   PetscCall(PetscViewerASCIIPushSynchronized(v));
1371   PetscCall(ISGetLocalSize(points, &npoints));
1372   if (npoints) {
1373     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1374     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1375   }
1376   PetscCall(PetscViewerFlush(v));
1377   PetscCall(PetscViewerASCIIPopSynchronized(v));
1378   PetscFunctionReturn(0);
1379 }
1380 
/* Verify that the PointSF of ipdm covers exactly the computed interface points
   (interface_is). On mismatch, print the missing and extra points with their types to
   stderr and raise an error. An unset/NULL PointSF is accepted only with an empty
   interface. */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF     pointsf;
  IS          pointsf_is;
  PetscBool   flg;
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    PetscInt N = 0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  /* the check must fail collectively: any rank's mismatch fails all ranks */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
  if (!flg) {
    /* report details via CHKERRMY so a failure while reporting still aborts cleanly */
    IS          pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1427 
1428 /* remove faces & edges from label, leave just vertices */
1429 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1430 {
1431   PetscInt vStart, vEnd;
1432   MPI_Comm comm;
1433 
1434   PetscFunctionBegin;
1435   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1436   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1437   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1438   PetscFunctionReturn(0);
1439 }
1440 
1441 /*
1442   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1443 
1444   Collective
1445 
1446   Input Parameters:
1447 . dm - The DMPlex object
1448 
1449   Notes:
1450   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1451   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1452   This is mainly intended for debugging/testing purposes.
1453 
1454   Algorithm:
1455   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1456   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1457   3. the mesh is distributed or loaded in parallel
1458   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1459   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1460   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1461   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1462 
1463   Level: developer
1464 
1465 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1466 */
1467 static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1468 {
1469   DM                     ipdm = NULL;
1470   IS                     boundary_faces_is, interface_faces_is, interface_is;
1471   DMPlexInterpolatedFlag intp;
1472   MPI_Comm               comm;
1473 
1474   PetscFunctionBegin;
1475   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1476 
1477   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1478   if (intp == DMPLEX_INTERPOLATED_FULL) {
1479     ipdm = dm;
1480   } else {
1481     /* create temporary interpolated DM if input DM is not interpolated */
1482     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1483     PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1484     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1485   }
1486   PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1487 
1488   /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1489   PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1490   /* get inter-partition interface faces (interface_faces_is)*/
1491   PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1492   /* compute inter-partition interface including edges and vertices (interface_is) */
1493   PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1494   /* destroy immediate ISs */
1495   PetscCall(ISDestroy(&boundary_faces_is));
1496   PetscCall(ISDestroy(&interface_faces_is));
1497 
1498   /* for uninterpolated case, keep just vertices in interface */
1499   if (!intp) {
1500     PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1501     PetscCall(DMDestroy(&ipdm));
1502   }
1503 
1504   /* compare PointSF with the boundary reconstructed from coordinates */
1505   PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1506   PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1507   PetscCall(ISDestroy(&interface_is));
1508   PetscFunctionReturn(0);
1509 }
1510 
1511 int main(int argc, char **argv)
1512 {
1513   DM     dm;
1514   AppCtx user;
1515 
1516   PetscFunctionBeginUser;
1517   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1518   PetscCall(PetscLogStageRegister("create", &stage[0]));
1519   PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1520   PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1521   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1522   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1523   if (user.nPointsToExpand) PetscCall(TestExpandPoints(dm, &user));
1524   if (user.ncoords) {
1525     Vec coords;
1526 
1527     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1528     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1529     PetscCall(VecDestroy(&coords));
1530   }
1531   PetscCall(DMDestroy(&dm));
1532   PetscCall(PetscFinalize());
1533   return 0;
1534 }
1535 
1536 /*TEST
1537 
1538   testset:
1539     nsize: 2
1540     args: -dm_view ascii::ascii_info_detail
1541     args: -dm_plex_check_all
1542     test:
1543       suffix: 1_tri_dist0
1544       args: -distribute 0 -interpolate {{none create}separate output}
1545     test:
1546       suffix: 1_tri_dist1
1547       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1548     test:
1549       suffix: 1_quad_dist0
1550       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1551     test:
1552       suffix: 1_quad_dist1
1553       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1554     test:
1555       suffix: 1_1d_dist1
1556       args: -dim 1 -distribute 1
1557 
1558   testset:
1559     nsize: 3
1560     args: -testnum 1 -interpolate create
1561     args: -dm_plex_check_all
1562     test:
1563       suffix: 2
1564       args: -dm_view ascii::ascii_info_detail
1565     test:
1566       suffix: 2a
1567       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1568     test:
1569       suffix: 2b
1570       args: -test_expand_points 0,1,2,5,6
1571     test:
1572       suffix: 2c
1573       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1574 
1575   testset:
    # the same as testset 1 (suffixes 1_*) but for 3D
1577     nsize: 2
1578     args: -dim 3 -dm_view ascii::ascii_info_detail
1579     args: -dm_plex_check_all
1580     test:
1581       suffix: 4_tet_dist0
1582       args: -distribute 0 -interpolate {{none create}separate output}
1583     test:
1584       suffix: 4_tet_dist1
1585       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1586     test:
1587       suffix: 4_hex_dist0
1588       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1589     test:
1590       suffix: 4_hex_dist1
1591       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1592 
1593   test:
1594     # the same as 4_tet_dist0 but test different initial orientations
1595     suffix: 4_tet_test_orient
1596     nsize: 2
1597     args: -dim 3 -distribute 0
1598     args: -dm_plex_check_all
1599     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1600     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1601 
1602   testset:
1603     requires: exodusii
1604     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1605     args: -dm_view ascii::ascii_info_detail
1606     args: -dm_plex_check_all
1607     args: -custom_view
1608     test:
1609       suffix: 5_seq
1610       nsize: 1
1611       args: -distribute 0 -interpolate {{none create}separate output}
1612     test:
1613       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1614       suffix: 5_dist0
1615       nsize: 2
1616       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1617     test:
1618       suffix: 5_dist1
1619       nsize: 2
1620       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1621 
1622   testset:
1623     nsize: {{1 2 4}}
1624     args: -use_generator
1625     args: -dm_plex_check_all
1626     args: -distribute -interpolate none
1627     test:
1628       suffix: 6_tri
1629       requires: triangle
1630       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1631     test:
1632       suffix: 6_quad
1633       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1634     test:
1635       suffix: 6_tet
1636       requires: ctetgen
1637       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1638     test:
1639       suffix: 6_hex
1640       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1641   testset:
1642     nsize: {{1 2 4}}
1643     args: -use_generator
1644     args: -dm_plex_check_all
1645     args: -distribute -interpolate create
1646     test:
1647       suffix: 6_int_tri
1648       requires: triangle
1649       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1650     test:
1651       suffix: 6_int_quad
1652       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1653     test:
1654       suffix: 6_int_tet
1655       requires: ctetgen
1656       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1657     test:
1658       suffix: 6_int_hex
1659       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1660   testset:
1661     nsize: {{2 4}}
1662     args: -use_generator
1663     args: -dm_plex_check_all
1664     args: -distribute -interpolate after_distribute
1665     test:
1666       suffix: 6_parint_tri
1667       requires: triangle
1668       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1669     test:
1670       suffix: 6_parint_quad
1671       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1672     test:
1673       suffix: 6_parint_tet
1674       requires: ctetgen
1675       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1676     test:
1677       suffix: 6_parint_hex
1678       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1679 
1680   testset: # 7 EXODUS
1681     requires: exodusii
1682     args: -dm_plex_check_all
1683     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1684     args: -distribute
1685     test: # seq load, simple partitioner
1686       suffix: 7_exo
1687       nsize: {{1 2 4 5}}
1688       args: -interpolate none
1689     test: # seq load, seq interpolation, simple partitioner
1690       suffix: 7_exo_int_simple
1691       nsize: {{1 2 4 5}}
1692       args: -interpolate create
1693     test: # seq load, seq interpolation, metis partitioner
1694       suffix: 7_exo_int_metis
1695       requires: parmetis
1696       nsize: {{2 4 5}}
1697       args: -interpolate create
1698       args: -petscpartitioner_type parmetis
1699     test: # seq load, simple partitioner, par interpolation
1700       suffix: 7_exo_simple_int
1701       nsize: {{2 4 5}}
1702       args: -interpolate after_distribute
1703     test: # seq load, metis partitioner, par interpolation
1704       suffix: 7_exo_metis_int
1705       requires: parmetis
1706       nsize: {{2 4 5}}
1707       args: -interpolate after_distribute
1708       args: -petscpartitioner_type parmetis
1709 
  testset: # 7 HDF5 SEQUENTIAL LOAD
1711     requires: hdf5 !complex
1712     args: -dm_plex_check_all
1713     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1714     args: -dm_plex_hdf5_force_sequential
1715     args: -distribute
1716     test: # seq load, simple partitioner
1717       suffix: 7_seq_hdf5_simple
1718       nsize: {{1 2 4 5}}
1719       args: -interpolate none
1720     test: # seq load, seq interpolation, simple partitioner
1721       suffix: 7_seq_hdf5_int_simple
1722       nsize: {{1 2 4 5}}
1723       args: -interpolate after_create
1724     test: # seq load, seq interpolation, metis partitioner
1725       nsize: {{2 4 5}}
1726       suffix: 7_seq_hdf5_int_metis
1727       requires: parmetis
1728       args: -interpolate after_create
1729       args: -petscpartitioner_type parmetis
1730     test: # seq load, simple partitioner, par interpolation
1731       suffix: 7_seq_hdf5_simple_int
1732       nsize: {{2 4 5}}
1733       args: -interpolate after_distribute
1734     test: # seq load, metis partitioner, par interpolation
1735       nsize: {{2 4 5}}
1736       suffix: 7_seq_hdf5_metis_int
1737       requires: parmetis
1738       args: -interpolate after_distribute
1739       args: -petscpartitioner_type parmetis
1740 
1741   testset: # 7 HDF5 PARALLEL LOAD
1742     requires: hdf5 !complex
1743     nsize: {{2 4 5}}
1744     args: -dm_plex_check_all
1745     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1746     test: # par load
1747       suffix: 7_par_hdf5
1748       args: -interpolate none
1749     test: # par load, par interpolation
1750       suffix: 7_par_hdf5_int
1751       args: -interpolate after_create
1752     test: # par load, parmetis repartitioner
1753       TODO: Parallel partitioning of uninterpolated meshes not supported
1754       suffix: 7_par_hdf5_parmetis
1755       requires: parmetis
1756       args: -distribute -petscpartitioner_type parmetis
1757       args: -interpolate none
1758     test: # par load, par interpolation, parmetis repartitioner
1759       suffix: 7_par_hdf5_int_parmetis
1760       requires: parmetis
1761       args: -distribute -petscpartitioner_type parmetis
1762       args: -interpolate after_create
1763     test: # par load, parmetis partitioner, par interpolation
1764       TODO: Parallel partitioning of uninterpolated meshes not supported
1765       suffix: 7_par_hdf5_parmetis_int
1766       requires: parmetis
1767       args: -distribute -petscpartitioner_type parmetis
1768       args: -interpolate after_distribute
1769 
1770     test:
1771       suffix: 7_hdf5_hierarch
1772       requires: hdf5 ptscotch !complex
1773       nsize: {{2 3 4}separate output}
1774       args: -distribute
1775       args: -interpolate after_create
1776       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1777       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1778       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1779 
1780   test:
1781     suffix: 8
1782     requires: hdf5 !complex
1783     nsize: 4
1784     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1785     args: -distribute 0 -interpolate after_create
1786     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1787     args: -dm_plex_check_all
1788     args: -custom_view
1789 
  testset: # 9 HDF5 SEQUENTIAL LOAD
1791     requires: hdf5 !complex datafilespath
1792     args: -dm_plex_check_all
1793     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1794     args: -dm_plex_hdf5_force_sequential
1795     args: -distribute
1796     test: # seq load, simple partitioner
1797       suffix: 9_seq_hdf5_simple
1798       nsize: {{1 2 4 5}}
1799       args: -interpolate none
1800     test: # seq load, seq interpolation, simple partitioner
1801       suffix: 9_seq_hdf5_int_simple
1802       nsize: {{1 2 4 5}}
1803       args: -interpolate after_create
1804     test: # seq load, seq interpolation, metis partitioner
1805       nsize: {{2 4 5}}
1806       suffix: 9_seq_hdf5_int_metis
1807       requires: parmetis
1808       args: -interpolate after_create
1809       args: -petscpartitioner_type parmetis
1810     test: # seq load, simple partitioner, par interpolation
1811       suffix: 9_seq_hdf5_simple_int
1812       nsize: {{2 4 5}}
1813       args: -interpolate after_distribute
1814     test: # seq load, simple partitioner, par interpolation
1815       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1816       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1817       # We can then provide an intentionally broken mesh instead.
1818       TODO: This test is broken because PointSF is fixed.
1819       suffix: 9_seq_hdf5_simple_int_err
1820       nsize: 4
1821       args: -interpolate after_distribute
1822       filter: sed -e "/PETSC ERROR/,$$d"
1823     test: # seq load, metis partitioner, par interpolation
1824       nsize: {{2 4 5}}
1825       suffix: 9_seq_hdf5_metis_int
1826       requires: parmetis
1827       args: -interpolate after_distribute
1828       args: -petscpartitioner_type parmetis
1829 
1830   testset: # 9 HDF5 PARALLEL LOAD
1831     requires: hdf5 !complex datafilespath
1832     nsize: {{2 4 5}}
1833     args: -dm_plex_check_all
1834     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1835     test: # par load
1836       suffix: 9_par_hdf5
1837       args: -interpolate none
1838     test: # par load, par interpolation
1839       suffix: 9_par_hdf5_int
1840       args: -interpolate after_create
1841     test: # par load, parmetis repartitioner
1842       TODO: Parallel partitioning of uninterpolated meshes not supported
1843       suffix: 9_par_hdf5_parmetis
1844       requires: parmetis
1845       args: -distribute -petscpartitioner_type parmetis
1846       args: -interpolate none
1847     test: # par load, par interpolation, parmetis repartitioner
1848       suffix: 9_par_hdf5_int_parmetis
1849       requires: parmetis
1850       args: -distribute -petscpartitioner_type parmetis
1851       args: -interpolate after_create
1852     test: # par load, parmetis partitioner, par interpolation
1853       TODO: Parallel partitioning of uninterpolated meshes not supported
1854       suffix: 9_par_hdf5_parmetis_int
1855       requires: parmetis
1856       args: -distribute -petscpartitioner_type parmetis
1857       args: -interpolate after_distribute
1858 
1859   testset: # 10 HDF5 PARALLEL LOAD
1860     requires: hdf5 !complex datafilespath
1861     nsize: {{2 4 7}}
1862     args: -dm_plex_check_all
1863     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1864     test: # par load, par interpolation
1865       suffix: 10_par_hdf5_int
1866       args: -interpolate after_create
1867 TEST*/
1868