xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
    mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
/* When (if at all) the mesh is topologically interpolated, relative to DMPlexDistribute() */
typedef enum {
  NONE,             /* no interpolation */
  CREATE,           /* interpolate while the mesh is being created */
  AFTER_CREATE,     /* interpolate after creation, before distribution */
  AFTER_DISTRIBUTE  /* interpolate after distribution (requires -distribute 1) */
} InterpType;
195 
typedef struct {
  PetscInt    debug;        /* The debugging level */
  PetscInt    testNum;      /* Indicates the mesh to create */
  PetscInt    dim;          /* The topological mesh dimension */
  PetscBool   cellSimplex;  /* Use simplices or hexes */
  PetscBool   distribute;   /* Distribute the mesh */
  InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool   useGenerator; /* Construct mesh with a mesh generator */
  PetscBool   testOrientIF; /* Test for different original interface orientations */
  PetscBool   testHeavy;    /* Run the heavy PointSF test */
  PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt    faces[3];     /* Number of faces per dimension for generator */
  PetscScalar coords[128];  /* Coordinates passed via -view_vertices_from_coords */
  PetscReal   coordsTol;    /* Tolerance for matching those coordinates (PETSC_DEFAULT unless overridden) */
  PetscInt    ncoords;      /* Number of entries of coords[] actually filled by the option */
  PetscInt    pointsToExpand[128];   /* DAG points passed via -test_expand_points */
  PetscInt    nPointsToExpand;       /* Number of entries of pointsToExpand[] actually filled */
  PetscBool   testExpandPointsEmpty; /* For -test_expand_points, rank 0 passes an empty input array */
  char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
217 
/* Rank-independent description of a mesh boundary, consumed by DMPlexCheckPointSFHeavy() */
struct _n_PortableBoundary {
  Vec           coordinates; /* coordinates of the boundary points */
  PetscInt      depth;       /* number of entries in sections[] (one per depth stratum) */
  PetscSection *sections;    /* per-depth sections; all destroyed by PortableBoundaryDestroy() */
};
typedef struct _n_PortableBoundary *PortableBoundary;
224 
225 #if defined(PETSC_USE_LOG)
226 static PetscLogStage stage[3];
227 #endif
228 
229 static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
230 static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
231 static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
232 static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
233 
234 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd) {
235   PetscInt d;
236 
237   PetscFunctionBegin;
238   if (!*bnd) PetscFunctionReturn(0);
239   PetscCall(VecDestroy(&(*bnd)->coordinates));
240   for (d = 0; d < (*bnd)->depth; d++) { PetscCall(PetscSectionDestroy(&(*bnd)->sections[d])); }
241   PetscCall(PetscFree((*bnd)->sections));
242   PetscCall(PetscFree(*bnd));
243   PetscFunctionReturn(0);
244 }
245 
/*
  Parse command-line options into the AppCtx.  Collective on comm.

  Sets defaults first, then registers/reads each option.  Enforces
  cross-option consistency: -interpolate after_distribute requires
  -distribute 1, the length of -faces must agree with -dim when both are
  given, and -rotate_interface_0/-rotate_interface_1 must be given together.
*/
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) {
  const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
  PetscInt    interp         = NONE, dim;
  PetscBool   flg1, flg2;

  PetscFunctionBegin;
  /* Defaults (before any option is read) */
  options->debug                 = 0;
  options->testNum               = 0;
  options->dim                   = 2;
  options->cellSimplex           = PETSC_TRUE;
  options->distribute            = PETSC_FALSE;
  options->interpolate           = NONE;
  options->useGenerator          = PETSC_FALSE;
  options->testOrientIF          = PETSC_FALSE;
  options->testHeavy             = PETSC_TRUE;
  options->customView            = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]               = 0;
  options->ornt[1]               = 0;
  options->faces[0]              = 2;
  options->faces[1]              = 2;
  options->faces[2]              = 2;
  options->filename[0]           = '\0';
  options->coordsTol             = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType)interp;
  /* after_distribute makes no sense without distribution */
  PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords/nPointsToExpand are in-out: set to the array capacity before the call, hold the count read after it */
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) { PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL)); }
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
  /* -faces may imply the dimension; if -dim was also given, the two must agree */
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
  PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i = 0; i < 2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
    }
    /* The orientation test only works for the hard-coded serial 3D simplex mesh */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
314 
315 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) {
316   PetscInt    testNum = user->testNum;
317   PetscMPIInt rank, size;
318   PetscInt    numCorners = 2, i;
319   PetscInt    numCells, numVertices, network;
320   PetscInt   *cells;
321   PetscReal  *coords;
322 
323   PetscFunctionBegin;
324   PetscCallMPI(MPI_Comm_rank(comm, &rank));
325   PetscCallMPI(MPI_Comm_size(comm, &size));
326   PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);
327 
328   numCells = 3;
329   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
330   PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must >=3", numCells);
331 
332   if (size == 1) {
333     numVertices = numCells + 1;
334     PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
335     for (i = 0; i < numCells; i++) {
336       cells[2 * i]      = i;
337       cells[2 * i + 1]  = i + 1;
338       coords[2 * i]     = i;
339       coords[2 * i + 1] = i + 1;
340     }
341 
342     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
343     PetscCall(PetscFree2(cells, coords));
344     PetscFunctionReturn(0);
345   }
346 
347   network = 0;
348   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
349   if (network == 0) {
350     switch (rank) {
351     case 0: {
352       numCells    = 2;
353       numVertices = numCells;
354       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
355       cells[0]  = 0;
356       cells[1]  = 1;
357       cells[2]  = 1;
358       cells[3]  = 2;
359       coords[0] = 0.;
360       coords[1] = 1.;
361       coords[2] = 1.;
362       coords[3] = 2.;
363     } break;
364     case 1: {
365       numCells -= 2;
366       numVertices = numCells + 1;
367       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
368       for (i = 0; i < numCells; i++) {
369         cells[2 * i]      = 2 + i;
370         cells[2 * i + 1]  = 2 + i + 1;
371         coords[2 * i]     = 2 + i;
372         coords[2 * i + 1] = 2 + i + 1;
373       }
374     } break;
375     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
376     }
377   } else { /* network_case = 1 */
378     /* ----------------------- */
379     switch (rank) {
380     case 0: {
381       numCells    = 2;
382       numVertices = 3;
383       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
384       cells[0] = 0;
385       cells[1] = 3;
386       cells[2] = 3;
387       cells[3] = 1;
388     } break;
389     case 1: {
390       numCells    = 1;
391       numVertices = 1;
392       PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
393       cells[0] = 3;
394       cells[1] = 2;
395     } break;
396     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
397     }
398   }
399   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
400   PetscCall(PetscFree2(cells, coords));
401   PetscFunctionReturn(0);
402 }
403 
/*
  Create the pre-distributed 2D triangle test meshes (see the "Triangle"
  diagrams in the file header).  Collective on comm.

  testNum 0 requires exactly 2 ranks, testNum 1 and 2 exactly 3 ranks; each
  rank supplies its own cell/vertex lists and every rank labels its boundary
  vertices with the "marker" label (local point numbers, value 1).
*/
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) {
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    /* Two triangles sharing an edge ("Triangle Test 0" diagram) */
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 3, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    /* Four triangles on 3 ranks ("Triangle Test 1" diagram) */
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 2, 3};
      PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      /* rank 2 owns two cells but contributes only one new vertex */
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      /* NOTE(review): only the first 3 of 5 marker pairs are applied here (p < 3) — confirm intentional */
      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    /* Same four triangles with a different global vertex numbering ("Triangle Test 2" diagram) */
    PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 2, 0};
      PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 0, 3};
      PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      /* NOTE(review): only the first 3 of 5 marker pairs are applied here (p < 3) — confirm intentional */
      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
506 
/*
  Create the pre-distributed 3D tetrahedral test mesh: two tets sharing a
  face across 2 ranks (see the "Tetrahedron" diagram in the file header).
  Collective on comm.

  If user->testOrientIF is set, additionally rotate the shared interface
  face on each rank by user->ornt[rank] and verify that
  DMPlexOrientInterface_Internal() repairs the mismatch.
*/
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) {
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
      const PetscInt cells[4]        = {0, 2, 1, 3};
      PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]        = {1, 2, 4, 3};
      PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    /* ifp[rank] is the local DAG point of the shared interface face on each rank
       (presumably valid only for this interpolated 2-rank tet mesh — TODO confirm) */
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
555 
556 static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) {
557   PetscInt    testNum = user->testNum, p;
558   PetscMPIInt rank, size;
559 
560   PetscFunctionBegin;
561   PetscCallMPI(MPI_Comm_rank(comm, &rank));
562   PetscCallMPI(MPI_Comm_size(comm, &size));
563   switch (testNum) {
564   case 0:
565     PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
566     switch (rank) {
567     case 0: {
568       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
569       const PetscInt cells[4]            = {0, 1, 2, 3};
570       PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
571       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};
572 
573       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
574       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
575     } break;
576     case 1: {
577       const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
578       const PetscInt cells[4]            = {1, 4, 5, 2};
579       PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
580       PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};
581 
582       PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
583       for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
584     } break;
585     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
586     }
587     break;
588   default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
589   }
590   PetscFunctionReturn(0);
591 }
592 
/*
  Create the pre-distributed 3D hexahedral test mesh: two hexes sharing a
  face across exactly 2 ranks (see the "Hexahedron" diagram in the file
  header).  Collective on comm; only testNum 0 is supported.
*/
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm) {
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      /* NOTE(review): only the first 4 of 8 marker pairs are applied (p < 4) — confirm intentional */
      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      /* NOTE(review): only the first 4 of 8 marker pairs are applied (p < 4) — confirm intentional */
      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
    } break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
629 
630 static PetscErrorCode CustomView(DM dm, PetscViewer v) {
631   DMPlexInterpolatedFlag interpolated;
632   PetscBool              distributed;
633 
634   PetscFunctionBegin;
635   PetscCall(DMPlexIsDistributed(dm, &distributed));
636   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
637   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
638   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
639   PetscFunctionReturn(0);
640 }
641 
/*
  Load the mesh from user->filename.  Collective on comm.

  Output:
    dm       - the loaded mesh (possibly already distributed by the loader)
    serialDM - when the heavy PointSF test is enabled and the loader produced
               a distributed mesh, a second, forced-sequential load of the
               same file; NULL otherwise

  When testHeavy && interpCreate, interface orientation is temporarily
  disabled around the load so that PointSF issues are left for
  DMPlexCheckPointSFHeavy() to detect (see inline comment).
*/
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM) {
  const char *filename     = user->filename;
  PetscBool   testHeavy    = user->testHeavy;
  PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool   distributed  = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    /* Force a sequential re-load so the heavy test has a serial reference mesh */
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  /* The file determines the topological dimension; propagate it back to the options */
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
666 
/* Build the test mesh according to user options - from a file, via the box-mesh
   generator, or from one of the hard-coded meshes documented at the top of this
   file - then optionally interpolate (serially and/or after distribution),
   partition/redistribute, and run the heavy PointSF checks. The final DM is
   returned in *dm; all intermediate DMs are destroyed here. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) {
  PetscPartitioner part;
  PortableBoundary boundary       = NULL; /* coordinate-based whole-domain boundary description (testHeavy only) */
  DM               serialDM       = NULL; /* serial copy kept for boundary extraction (testHeavy only) */
  PetscBool        cellSimplex    = user->cellSimplex;
  PetscBool        useGenerator   = user->useGenerator;
  /* exactly one of these three is true, selecting when interpolation happens */
  PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        testHeavy      = user->testHeavy;
  PetscMPIInt      rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* --- mesh creation: file > generator > hard-coded test meshes --- */
  if (user->filename[0]) {
    PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
  } else if (useGenerator) {
    /* stage[] is a file-scope array of log stages defined earlier in this file */
    PetscCall(PetscLogStagePush(stage[0]));
    PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
    PetscCall(PetscLogStagePop());
  } else {
    PetscCall(PetscLogStagePush(stage[0]));
    switch (user->dim) {
    case 1: PetscCall(CreateMesh_1D(comm, interpCreate, user, dm)); break;
    case 2:
      if (cellSimplex) {
        PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
      }
      break;
    case 3:
      if (cellSimplex) {
        PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
      }
      break;
    default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
    }
    PetscCall(PetscLogStagePop());
  }
  /* user->coords is consumed elsewhere as dim-tuples, so its length must divide evenly */
  PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisable by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
  PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

  /* --- optional serial interpolation (before distribution) --- */
  if (interpSerial) {
    DM idm;

    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
    PetscCall(PetscLogStagePush(stage[2]));
    PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscCall(PetscLogStagePop());
    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
    PetscCall(DMDestroy(dm)); /* replace *dm by the interpolated mesh */
    *dm = idm;
    PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
  }

  /* Set partitioner options */
  PetscCall(DMPlexGetPartitioner(*dm, &part));
  if (part) {
    /* default to the deterministic "simple" partitioner; overridable from the command line */
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
  }

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  if (testHeavy) {
    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(*dm, &distributed));
    if (!serialDM && !distributed) {
      /* no file-based serial DM available but *dm is still serial: reuse it (extra reference) */
      serialDM = *dm;
      PetscCall(PetscObjectReference((PetscObject)*dm));
    }
    if (serialDM) { PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary)); }
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }
  if (user->distribute) {
    DM pdm = NULL;

    /* Redistribute mesh over processes using that partitioner */
    PetscCall(PetscLogStagePush(stage[1]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
    PetscCall(PetscLogStagePop());
    if (pdm) { /* pdm stays NULL e.g. on a single process; keep *dm then */
      PetscCall(DMDestroy(dm));
      *dm = pdm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
    }

    /* --- optional parallel interpolation (after distribution) --- */
    if (interpParallel) {
      DM idm;

      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
      PetscCall(PetscLogStagePush(stage[2]));
      PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscCall(PetscLogStagePop());
      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
      PetscCall(DMDestroy(dm));
      *dm = idm;
      PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
    }
  }
  if (testHeavy) {
    /* re-check the PointSF on the (possibly re-interpolated) distributed mesh */
    if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }

  PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
  /* disable implicit distribution so DMSetFromOptions() does not redistribute behind our back */
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(DMDestroy(&serialDM));
  PetscCall(PortableBoundaryDestroy(&boundary));
  PetscFunctionReturn(0);
}
795 
796 #define ps2d(number) ((double)PetscRealPart(number))
797 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol) {
798   PetscFunctionBegin;
799   PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
800   if (tol >= 1e-3) {
801     switch (dim) {
802     case 1: PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0])));
803     case 2: PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1])));
804     case 3: PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
805     }
806   } else {
807     switch (dim) {
808     case 1: PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0])));
809     case 2: PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1])));
810     case 3: PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
811     }
812   }
813   PetscFunctionReturn(0);
814 }
815 
/* For each coordinate tuple in coordsVec, find the matching local vertex (within tol)
   via DMPlexFindVertices() and print "[rank] (coords) --> points[i] = p" on the
   synchronized ASCII viewer. Points not found locally are reported as returned by
   DMPlexFindVertices() (negative value by convention - TODO confirm). */
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer) {
  PetscInt           dim, i, npoints;
  IS                 pointsIS;
  const PetscInt    *points;
  const PetscScalar *coords;
  char               coordstr[128];
  MPI_Comm           comm;
  PetscMPIInt        rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i = 0; i < npoints; i++) {
    /* coordinates are stored as consecutive dim-tuples */
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
    /* separator emitted from rank 0 only, between (not before) entries */
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}
846 
/* Exercise DMPlexGetConeRecursive() on the user-supplied points: view the
   per-depth sections and index sets, and verify the inverse operation
   (DMPlexExpandedConesToFaces_Private) reproduces the next-higher depth level.
   With -test_expand_points_empty, rank 0 passes an empty IS to test that path. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user) {
  IS            is;
  PetscSection *sects;
  IS           *iss;
  PetscInt      d, depth;
  PetscMPIInt   rank;
  PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  if (user->testExpandPointsEmpty && rank == 0) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  /* per-rank sequential subviewer so each rank's output stays contiguous */
  PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
  /* iss[depth-1] holds the original points; iss[0] the fully expanded vertices */
  for (d = depth - 1; d >= 0; d--) {
    IS        checkIS;
    PetscBool flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth - 1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
      PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}
886 
/* Inverse of one level of DMPlexGetConeRecursive(): for each section point q,
   the IS holds q's expanded cone; compute the unique point joined by that cone
   via DMPlexGetJoin(). Sentinels in *newis: -1 if any cone entry was already
   invalid (negative), -2 if the join is empty. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis) {
  PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
  const PetscInt *coveredPoints;
  const PetscInt *arr, *cone;
  PetscInt       *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  /* the IS must be exactly the section's storage (one entry per cone point) */
  PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end - start, &newarr));
  for (q = start; q < end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* single-point cone: the point is its own "join" */
      numCoveredPoints = 1;
      p                = cone[0];
    } else {
      PetscInt i;
      /* PETSC_MAX_INT marks "no invalid entry seen yet" (any value >= 0 would do) */
      p = PETSC_MAX_INT;
      for (i = 0; i < ncone; i++)
        if (cone[i] < 0) {
          p = -1; /* propagate invalidity: some cone point was not found */
          break;
        }
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %" PetscInt_FMT, q);
        if (numCoveredPoints) p = coveredPoints[0];
        else p = -2; /* cone points exist locally but join no common point */
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q - start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  /* ownership of newarr passes to the IS */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
929 
930 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is) {
931   PetscInt d;
932   IS       is, newis;
933 
934   PetscFunctionBegin;
935   is = boundary_expanded_is;
936   PetscCall(PetscObjectReference((PetscObject)is));
937   for (d = 0; d < depth - 1; ++d) {
938     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
939     PetscCall(ISDestroy(&is));
940     is = newis;
941   }
942   *boundary_is = is;
943   PetscFunctionReturn(0);
944 }
945 
/* Like PetscCall() but on error only clears the recursion guard and carries on,
   so a failing viewer call cannot leave `incall` stuck at PETSC_TRUE.
   NOTE(review): the error code itself is discarded here. */
#define CHKERRQI(incall, ierr) \
  if (ierr) { incall = PETSC_FALSE; }

/* View `label` via the option `optionname` using a viewer created on `comm`
   (a plain DMLabelViewFromOptions would use the label's own communicator).
   A static `incall` flag guards against recursive re-entry. */
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm) {
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall, PetscOptionsGetViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
  if (flg) {
    CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
    CHKERRQI(incall, DMLabelView(label, viewer));
    CHKERRQI(incall, PetscViewerPopFormat(viewer));
    CHKERRQI(incall, PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}
968 
969 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
970 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is) {
971   IS tmpis;
972 
973   PetscFunctionBegin;
974   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
975   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
976   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
977   PetscCall(ISDestroy(&tmpis));
978   PetscFunctionReturn(0);
979 }
980 
/* currently only for simple PetscSection without fields or constraints */
/* Broadcast rootrank's sequential section sec0 (chart + per-point dof counts)
   to all ranks of comm, building an identical replicated section *secout on each.
   sec0 may be NULL on non-root ranks. Collective on comm. */
static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout) {
  PetscSection sec;
  PetscInt     chart[2], p;
  PetscInt    *dofarr;
  PetscMPIInt  rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* step 1: broadcast the chart so every rank can size its dof array */
  if (rank == rootrank) { PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1])); }
  PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
  /* step 2: gather dofs on root, then broadcast them */
  if (rank == rootrank) {
    for (p = chart[0]; p < chart[1]; p++) { PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]])); }
  }
  PetscCallMPI(MPI_Bcast(dofarr, chart[1] - chart[0], MPIU_INT, rootrank, comm));
  /* step 3: rebuild an identical section on every rank */
  PetscCall(PetscSectionCreate(comm, &sec));
  PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
  for (p = chart[0]; p < chart[1]; p++) { PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]])); }
  PetscCall(PetscSectionSetUp(sec));
  PetscCall(PetscFree(dofarr));
  *secout = sec;
  PetscFunctionReturn(0);
}
1005 
1006 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is) {
1007   IS faces_expanded_is;
1008 
1009   PetscFunctionBegin;
1010   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1011   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1012   PetscCall(ISDestroy(&faces_expanded_is));
1013   PetscFunctionReturn(0);
1014 }
1015 
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
/* enable=PETSC_FALSE forces the option to 0; enable=PETSC_TRUE restores the
   user's previous setting (remembered in the static wasSetTrue) or clears the
   option so the library default (true) applies again. dm may be NULL, in which
   case the global options database with no prefix is used. */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable) {
  PetscOptions     options = NULL;
  const char      *prefix  = NULL;
  const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
  char             prefix_opt[512];
  PetscBool        flg, set;
  static PetscBool wasSetTrue = PETSC_FALSE;

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* assemble "-<prefix><opt-without-dash>"; prefix may be NULL (then ignored by PetscStrlcat) */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* remember if the user had explicitly set it to true, so we can restore later */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check: re-read the option and verify it now matches `enable` */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1052 
/* get coordinate description of the whole-domain boundary */
/* Input dm must be serial (all points on one rank). Marks its boundary faces,
   expands them recursively down to vertices, converts those to coordinates, and
   replicates coordinates + sections to every rank of dm's communicator, producing
   a partition-independent PortableBoundary. Collective. */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary) {
  PortableBoundary       bnd0, bnd; /* bnd0: root-local description; bnd: replicated result */
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                    *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0; /* the rank assumed to hold all points */
  PetscMPIInt            rank, size;
  PetscInt               value = 1; /* stratum value used for the boundary label */
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    /* expand boundary faces down to vertices; iss[0] holds the vertex-level expansion */
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      /* round-trip: coordinates -> faces must reproduce the original boundary IS */
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks hold an empty coordinate vector */
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec        tmp;
    VecScatter sc;
    IS         xis;
    PetscInt   n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n * size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* replicate depth and the per-depth sections as well */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d = 0; d < bnd->depth; d++) { PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d])); }

  if (rank == rootrank) { PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections)); }
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1153 
1154 /* get faces of inter-partition interface */
1155 static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is) {
1156   MPI_Comm               comm;
1157   DMLabel                label;
1158   IS                     part_boundary_faces_is;
1159   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1160   PetscInt               value              = 1;
1161   DMPlexInterpolatedFlag intp;
1162 
1163   PetscFunctionBegin;
1164   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1165   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1166   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1167 
1168   /* get ipdm partition boundary (partBoundary) */
1169   PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1170   PetscCall(DMAddLabel(ipdm, label));
1171   PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1172   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1173   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1174   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1175   PetscCall(DMLabelDestroy(&label));
1176 
1177   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1178   PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
1179   PetscCall(ISDestroy(&part_boundary_faces_is));
1180   PetscFunctionReturn(0);
1181 }
1182 
/* compute inter-partition interface including edges and vertices */
/* Put the interface faces into a label and let DMPlexLabelComplete() pull in
   the full closure (edges and vertices), then return that stratum as an IS on
   ipdm's communicator. Requires a fully interpolated DMPlex. */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is) {
  DMLabel                label;
  PetscInt               value           = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  /* complete the label over closures: adds edges and vertices of the faces */
  PetscCall(DMPlexLabelComplete(ipdm, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  /* the temporary label is removed again so the DM is left unchanged */
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(0);
}
1209 
1210 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is) {
1211   PetscInt        n;
1212   const PetscInt *arr;
1213 
1214   PetscFunctionBegin;
1215   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1216   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is) {
1221   PetscInt        n;
1222   const PetscInt *rootdegree;
1223   PetscInt       *arr;
1224 
1225   PetscFunctionBegin;
1226   PetscCall(PetscSFSetUp(sf));
1227   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1228   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1229   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1230   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is) {
1235   IS pointSF_out_is, pointSF_in_is;
1236 
1237   PetscFunctionBegin;
1238   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1239   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1240   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1241   PetscCall(ISDestroy(&pointSF_out_is));
1242   PetscCall(ISDestroy(&pointSF_in_is));
1243   PetscFunctionReturn(0);
1244 }
1245 
/* Used while reporting a PointSF error: if the diagnostic viewing itself fails,
   abort with this generic message instead of the detailed one. */
#define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")

/* Print each point of pointsIS with its stratum type (vertex/edge/face/cell);
   vertices get their coordinates, non-vertices recurse into their cone so the
   full closure is shown, indented one tab per level. */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v) {
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar    *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i = 0; i < npoints; i++) {
    p = points[i];
    /* depth stratum: 0 = vertex, 1 = edge, 2 = face, 3 = cell */
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      PetscInt n, o;
      char     coordstr[128];

      /* vertex: look up its coordinates in the coordinate section */
      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char entityType[16];

      switch (depth) {
      case 1: PetscCall(PetscStrcpy(entityType, "edge")); break;
      case 2: PetscCall(PetscStrcpy(entityType, "face")); break;
      case 3: PetscCall(PetscStrcpy(entityType, "cell")); break;
      default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in 1D/2D meshes, the top-depth entities are cells even though depth < 3 */
      if (depth == dim && dim < 3) { PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType))); }
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      const PetscInt *cone;
      IS              coneIS;

      /* recurse into the cone to print the point's closure */
      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1304 
1305 static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v) {
1306   PetscBool   flg;
1307   PetscInt    npoints;
1308   PetscMPIInt rank;
1309 
1310   PetscFunctionBegin;
1311   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1312   PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1313   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1314   PetscCall(PetscViewerASCIIPushSynchronized(v));
1315   PetscCall(ISGetLocalSize(points, &npoints));
1316   if (npoints) {
1317     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1318     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1319   }
1320   PetscCall(PetscViewerFlush(v));
1321   PetscCall(PetscViewerASCIIPopSynchronized(v));
1322   PetscFunctionReturn(0);
1323 }
1324 
/* Verify that the set of points covered by ipdm's PointSF (inward + outward)
   equals the independently computed inter-partition interface_is on every rank.
   On mismatch, print the missing/extra points with full detail and error out. */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is) {
  PetscSF     pointsf;
  IS          pointsf_is;
  PetscBool   flg;
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    /* no PointSF: acceptable only if the expected interface is empty too.
       NOTE(review): this early return assumes pointsf is NULL on all ranks
       simultaneously; otherwise the collective ops below would mismatch - confirm */
    PetscInt N = 0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  /* the test must fail globally if it fails on any rank */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
  if (!flg) {
    IS          pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    /* CHKERRMY: if the diagnostics themselves fail, bail out with a generic error */
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1370 
1371 /* remove faces & edges from label, leave just vertices */
1372 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points) {
1373   PetscInt vStart, vEnd;
1374   MPI_Comm comm;
1375 
1376   PetscFunctionBegin;
1377   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1378   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1379   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1380   PetscFunctionReturn(0);
1381 }
1382 
1383 /*
1384   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1385 
1386   Collective
1387 
1388   Input Parameters:
1389 . dm - The DMPlex object
1390 
1391   Notes:
1392   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1393   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1394   This is mainly intended for debugging/testing purposes.
1395 
1396   Algorithm:
1397   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1398   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1399   3. the mesh is distributed or loaded in parallel
1400   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1401   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1402   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1403   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1404 
1405   Level: developer
1406 
1407 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1408 */
static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd) {
  DM                     ipdm = NULL; /* interpolated view of dm; aliases dm itself when dm is already fully interpolated */
  IS                     boundary_faces_is, interface_faces_is, interface_is;
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    ipdm = dm;
  } else {
    /* create temporary interpolated DM if input DM is not interpolated */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
    PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
  }
  PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));

  /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates (steps 1-4 of the algorithm above) */
  PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
  /* get inter-partition interface faces (interface_faces_is): partition boundary minus domain boundary */
  PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
  /* compute inter-partition interface including edges and vertices (interface_is) */
  PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
  /* destroy intermediate ISs */
  PetscCall(ISDestroy(&boundary_faces_is));
  PetscCall(ISDestroy(&interface_faces_is));

  /* for uninterpolated case (!intp, i.e. DMPLEX_INTERPOLATED_NONE), keep just vertices in interface,
     since dm itself has no edges/faces for the PointSF comparison below; the temporary ipdm is then
     no longer needed. NOTE(review): for a PARTIAL/MIXED flag the temporary ipdm would not be
     destroyed here - presumably those states cannot reach this point; confirm against callers. */
  if (!intp) {
    PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
    PetscCall(DMDestroy(&ipdm));
  }

  /* compare PointSF with the boundary reconstructed from coordinates; errors out with details on mismatch */
  PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
  PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
  PetscCall(ISDestroy(&interface_is));
  PetscFunctionReturn(0);
}
1451 
1452 int main(int argc, char **argv) {
1453   DM     dm;
1454   AppCtx user;
1455 
1456   PetscFunctionBeginUser;
1457   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1458   PetscCall(PetscLogStageRegister("create", &stage[0]));
1459   PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1460   PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1461   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1462   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1463   if (user.nPointsToExpand) { PetscCall(TestExpandPoints(dm, &user)); }
1464   if (user.ncoords) {
1465     Vec coords;
1466 
1467     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1468     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1469     PetscCall(VecDestroy(&coords));
1470   }
1471   PetscCall(DMDestroy(&dm));
1472   PetscCall(PetscFinalize());
1473   return 0;
1474 }
1475 
1476 /*TEST
1477 
1478   testset:
1479     nsize: 2
1480     args: -dm_view ascii::ascii_info_detail
1481     args: -dm_plex_check_all
1482     test:
1483       suffix: 1_tri_dist0
1484       args: -distribute 0 -interpolate {{none create}separate output}
1485     test:
1486       suffix: 1_tri_dist1
1487       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1488     test:
1489       suffix: 1_quad_dist0
1490       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1491     test:
1492       suffix: 1_quad_dist1
1493       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1494     test:
1495       suffix: 1_1d_dist1
1496       args: -dim 1 -distribute 1
1497 
1498   testset:
1499     nsize: 3
1500     args: -testnum 1 -interpolate create
1501     args: -dm_plex_check_all
1502     test:
1503       suffix: 2
1504       args: -dm_view ascii::ascii_info_detail
1505     test:
1506       suffix: 2a
1507       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1508     test:
1509       suffix: 2b
1510       args: -test_expand_points 0,1,2,5,6
1511     test:
1512       suffix: 2c
1513       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1514 
1515   testset:
1516     # the same as testset 1 but for 3D
1517     nsize: 2
1518     args: -dim 3 -dm_view ascii::ascii_info_detail
1519     args: -dm_plex_check_all
1520     test:
1521       suffix: 4_tet_dist0
1522       args: -distribute 0 -interpolate {{none create}separate output}
1523     test:
1524       suffix: 4_tet_dist1
1525       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1526     test:
1527       suffix: 4_hex_dist0
1528       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1529     test:
1530       suffix: 4_hex_dist1
1531       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1532 
1533   test:
1534     # the same as 4_tet_dist0 but test different initial orientations
1535     suffix: 4_tet_test_orient
1536     nsize: 2
1537     args: -dim 3 -distribute 0
1538     args: -dm_plex_check_all
1539     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1540     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1541 
1542   testset:
1543     requires: exodusii
1544     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1545     args: -dm_view ascii::ascii_info_detail
1546     args: -dm_plex_check_all
1547     args: -custom_view
1548     test:
1549       suffix: 5_seq
1550       nsize: 1
1551       args: -distribute 0 -interpolate {{none create}separate output}
1552     test:
1553       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1554       suffix: 5_dist0
1555       nsize: 2
1556       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1557     test:
1558       suffix: 5_dist1
1559       nsize: 2
1560       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1561 
1562   testset:
1563     nsize: {{1 2 4}}
1564     args: -use_generator
1565     args: -dm_plex_check_all
1566     args: -distribute -interpolate none
1567     test:
1568       suffix: 6_tri
1569       requires: triangle
1570       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1571     test:
1572       suffix: 6_quad
1573       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1574     test:
1575       suffix: 6_tet
1576       requires: ctetgen
1577       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1578     test:
1579       suffix: 6_hex
1580       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1581   testset:
1582     nsize: {{1 2 4}}
1583     args: -use_generator
1584     args: -dm_plex_check_all
1585     args: -distribute -interpolate create
1586     test:
1587       suffix: 6_int_tri
1588       requires: triangle
1589       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1590     test:
1591       suffix: 6_int_quad
1592       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1593     test:
1594       suffix: 6_int_tet
1595       requires: ctetgen
1596       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1597     test:
1598       suffix: 6_int_hex
1599       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1600   testset:
1601     nsize: {{2 4}}
1602     args: -use_generator
1603     args: -dm_plex_check_all
1604     args: -distribute -interpolate after_distribute
1605     test:
1606       suffix: 6_parint_tri
1607       requires: triangle
1608       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1609     test:
1610       suffix: 6_parint_quad
1611       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1612     test:
1613       suffix: 6_parint_tet
1614       requires: ctetgen
1615       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1616     test:
1617       suffix: 6_parint_hex
1618       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1619 
1620   testset: # 7 EXODUS
1621     requires: exodusii
1622     args: -dm_plex_check_all
1623     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1624     args: -distribute
1625     test: # seq load, simple partitioner
1626       suffix: 7_exo
1627       nsize: {{1 2 4 5}}
1628       args: -interpolate none
1629     test: # seq load, seq interpolation, simple partitioner
1630       suffix: 7_exo_int_simple
1631       nsize: {{1 2 4 5}}
1632       args: -interpolate create
1633     test: # seq load, seq interpolation, metis partitioner
1634       suffix: 7_exo_int_metis
1635       requires: parmetis
1636       nsize: {{2 4 5}}
1637       args: -interpolate create
1638       args: -petscpartitioner_type parmetis
1639     test: # seq load, simple partitioner, par interpolation
1640       suffix: 7_exo_simple_int
1641       nsize: {{2 4 5}}
1642       args: -interpolate after_distribute
1643     test: # seq load, metis partitioner, par interpolation
1644       suffix: 7_exo_metis_int
1645       requires: parmetis
1646       nsize: {{2 4 5}}
1647       args: -interpolate after_distribute
1648       args: -petscpartitioner_type parmetis
1649 
1650   testset: # 7 HDF5 SEQUENTIAL LOAD
1651     requires: hdf5 !complex
1652     args: -dm_plex_check_all
1653     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1654     args: -dm_plex_hdf5_force_sequential
1655     args: -distribute
1656     test: # seq load, simple partitioner
1657       suffix: 7_seq_hdf5_simple
1658       nsize: {{1 2 4 5}}
1659       args: -interpolate none
1660     test: # seq load, seq interpolation, simple partitioner
1661       suffix: 7_seq_hdf5_int_simple
1662       nsize: {{1 2 4 5}}
1663       args: -interpolate after_create
1664     test: # seq load, seq interpolation, metis partitioner
1665       nsize: {{2 4 5}}
1666       suffix: 7_seq_hdf5_int_metis
1667       requires: parmetis
1668       args: -interpolate after_create
1669       args: -petscpartitioner_type parmetis
1670     test: # seq load, simple partitioner, par interpolation
1671       suffix: 7_seq_hdf5_simple_int
1672       nsize: {{2 4 5}}
1673       args: -interpolate after_distribute
1674     test: # seq load, metis partitioner, par interpolation
1675       nsize: {{2 4 5}}
1676       suffix: 7_seq_hdf5_metis_int
1677       requires: parmetis
1678       args: -interpolate after_distribute
1679       args: -petscpartitioner_type parmetis
1680 
1681   testset: # 7 HDF5 PARALLEL LOAD
1682     requires: hdf5 !complex
1683     nsize: {{2 4 5}}
1684     args: -dm_plex_check_all
1685     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1686     test: # par load
1687       suffix: 7_par_hdf5
1688       args: -interpolate none
1689     test: # par load, par interpolation
1690       suffix: 7_par_hdf5_int
1691       args: -interpolate after_create
1692     test: # par load, parmetis repartitioner
1693       TODO: Parallel partitioning of uninterpolated meshes not supported
1694       suffix: 7_par_hdf5_parmetis
1695       requires: parmetis
1696       args: -distribute -petscpartitioner_type parmetis
1697       args: -interpolate none
1698     test: # par load, par interpolation, parmetis repartitioner
1699       suffix: 7_par_hdf5_int_parmetis
1700       requires: parmetis
1701       args: -distribute -petscpartitioner_type parmetis
1702       args: -interpolate after_create
1703     test: # par load, parmetis partitioner, par interpolation
1704       TODO: Parallel partitioning of uninterpolated meshes not supported
1705       suffix: 7_par_hdf5_parmetis_int
1706       requires: parmetis
1707       args: -distribute -petscpartitioner_type parmetis
1708       args: -interpolate after_distribute
1709 
1710     test:
1711       suffix: 7_hdf5_hierarch
1712       requires: hdf5 ptscotch !complex
1713       nsize: {{2 3 4}separate output}
1714       args: -distribute
1715       args: -interpolate after_create
1716       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1717       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1718       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1719 
1720   test:
1721     suffix: 8
1722     requires: hdf5 !complex
1723     nsize: 4
1724     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1725     args: -distribute 0 -interpolate after_create
1726     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1727     args: -dm_plex_check_all
1728     args: -custom_view
1729 
1730   testset: # 9 HDF5 SEQUENTIAL LOAD
1731     requires: hdf5 !complex datafilespath
1732     args: -dm_plex_check_all
1733     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1734     args: -dm_plex_hdf5_force_sequential
1735     args: -distribute
1736     test: # seq load, simple partitioner
1737       suffix: 9_seq_hdf5_simple
1738       nsize: {{1 2 4 5}}
1739       args: -interpolate none
1740     test: # seq load, seq interpolation, simple partitioner
1741       suffix: 9_seq_hdf5_int_simple
1742       nsize: {{1 2 4 5}}
1743       args: -interpolate after_create
1744     test: # seq load, seq interpolation, metis partitioner
1745       nsize: {{2 4 5}}
1746       suffix: 9_seq_hdf5_int_metis
1747       requires: parmetis
1748       args: -interpolate after_create
1749       args: -petscpartitioner_type parmetis
1750     test: # seq load, simple partitioner, par interpolation
1751       suffix: 9_seq_hdf5_simple_int
1752       nsize: {{2 4 5}}
1753       args: -interpolate after_distribute
1754     test: # seq load, simple partitioner, par interpolation
1755       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1756       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1757       # We can then provide an intentionally broken mesh instead.
1758       TODO: This test is broken because PointSF is fixed.
1759       suffix: 9_seq_hdf5_simple_int_err
1760       nsize: 4
1761       args: -interpolate after_distribute
1762       filter: sed -e "/PETSC ERROR/,$$d"
1763     test: # seq load, metis partitioner, par interpolation
1764       nsize: {{2 4 5}}
1765       suffix: 9_seq_hdf5_metis_int
1766       requires: parmetis
1767       args: -interpolate after_distribute
1768       args: -petscpartitioner_type parmetis
1769 
1770   testset: # 9 HDF5 PARALLEL LOAD
1771     requires: hdf5 !complex datafilespath
1772     nsize: {{2 4 5}}
1773     args: -dm_plex_check_all
1774     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1775     test: # par load
1776       suffix: 9_par_hdf5
1777       args: -interpolate none
1778     test: # par load, par interpolation
1779       suffix: 9_par_hdf5_int
1780       args: -interpolate after_create
1781     test: # par load, parmetis repartitioner
1782       TODO: Parallel partitioning of uninterpolated meshes not supported
1783       suffix: 9_par_hdf5_parmetis
1784       requires: parmetis
1785       args: -distribute -petscpartitioner_type parmetis
1786       args: -interpolate none
1787     test: # par load, par interpolation, parmetis repartitioner
1788       suffix: 9_par_hdf5_int_parmetis
1789       requires: parmetis
1790       args: -distribute -petscpartitioner_type parmetis
1791       args: -interpolate after_create
1792     test: # par load, parmetis partitioner, par interpolation
1793       TODO: Parallel partitioning of uninterpolated meshes not supported
1794       suffix: 9_par_hdf5_parmetis_int
1795       requires: parmetis
1796       args: -distribute -petscpartitioner_type parmetis
1797       args: -interpolate after_distribute
1798 
1799   testset: # 10 HDF5 PARALLEL LOAD
1800     requires: hdf5 !complex datafilespath
1801     nsize: {{2 4 7}}
1802     args: -dm_plex_check_all
1803     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1804     test: # par load, par interpolation
1805       suffix: 10_par_hdf5_int
1806       args: -interpolate after_create
1807 TEST*/
1808