xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision 45601b4b7d8ff587f0d3d0ebcf8818ecb79c2acc)
1 static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
    mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
189 typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType;
190 
191 typedef struct {
192   PetscInt   debug;                        /* The debugging level */
193   PetscInt   testNum;                      /* Indicates the mesh to create */
194   PetscInt   dim;                          /* The topological mesh dimension */
195   PetscBool  cellSimplex;                  /* Use simplices or hexes */
196   PetscBool  distribute;                   /* Distribute the mesh */
197   InterpType interpolate;                  /* Interpolate the mesh before or after DMPlexDistribute() */
198   PetscBool  useGenerator;                 /* Construct mesh with a mesh generator */
199   PetscBool  testOrientIF;                 /* Test for different original interface orientations */
200   PetscBool  testHeavy;                    /* Run the heavy PointSF test */
201   PetscBool  customView;                   /* Show results of DMPlexIsInterpolated() etc. */
202   PetscInt   ornt[2];                      /* Orientation of interface on rank 0 and rank 1 */
203   PetscInt   faces[3];                     /* Number of faces per dimension for generator */
204   PetscScalar coords[128];
205   PetscReal  coordsTol;
206   PetscInt   ncoords;
207   PetscInt   pointsToExpand[128];
208   PetscInt   nPointsToExpand;
209   PetscBool  testExpandPointsEmpty;
210   char       filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
211 } AppCtx;
212 
213 struct _n_PortableBoundary {
214   Vec coordinates;
215   PetscInt depth;
216   PetscSection *sections;
217 };
218 typedef struct _n_PortableBoundary * PortableBoundary;
219 
220 #if defined(PETSC_USE_LOG)
221 static PetscLogStage  stage[3];
222 #endif
223 
224 static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
225 static PetscErrorCode DMPlexSetOrientInterface_Private(DM,PetscBool);
226 static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
227 static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
228 
229 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
230 {
231   PetscInt       d;
232 
233   PetscFunctionBegin;
234   if (!*bnd) PetscFunctionReturn(0);
235   PetscCall(VecDestroy(&(*bnd)->coordinates));
236   for (d=0; d < (*bnd)->depth; d++) {
237     PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
238   }
239   PetscCall(PetscFree((*bnd)->sections));
240   PetscCall(PetscFree(*bnd));
241   PetscFunctionReturn(0);
242 }
243 
/* Parse command-line options into the AppCtx. Note the couplings enforced here:
   -interpolate after_distribute requires -distribute 1; the length of -faces
   (if given) must agree with -dim (or -dim may be omitted); -rotate_interface_0
   and -rotate_interface_1 must be given together and force a fixed 3D
   two-tetrahedra configuration (-interpolate create -distribute 0). */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *interpTypes[4]  = {"none", "create", "after_create", "after_distribute"};
  PetscInt       interp=NONE, dim;
  PetscBool      flg1, flg2;

  PetscFunctionBegin;
  /* Defaults; each may be overridden by the options queried below */
  options->debug        = 0;
  options->testNum      = 0;
  options->dim          = 2;
  options->cellSimplex  = PETSC_TRUE;
  options->distribute   = PETSC_FALSE;
  options->interpolate  = NONE;
  options->useGenerator = PETSC_FALSE;
  options->testOrientIF = PETSC_FALSE;
  options->testHeavy    = PETSC_TRUE;
  options->customView   = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]      = 0;
  options->ornt[1]      = 0;
  options->faces[0]     = 2;
  options->faces[1]     = 2;
  options->faces[2]     = 2;
  options->filename[0]  = '\0';
  options->coordsTol    = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType) interp;
  /* after_distribute only makes sense when the mesh actually gets distributed */
  PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE,comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords/nPointsToExpand are set to array capacity before the query; the
     query overwrites them with the number of values actually read */
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) {
    PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  }
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3));
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    /* -faces implies the dimension; an explicitly given -dim must agree */
    PetscCheck(!flg1 || dim == options->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0));
  PetscCheck(flg2 == options->testOrientIF,comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i=0; i<2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
    }
    /* The orientation test runs on a fixed 3D two-tetrahedra mesh; override
       any conflicting user settings */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
315 
316 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
317 {
318   PetscInt       testNum = user->testNum;
319   PetscMPIInt    rank,size;
320   PetscInt       numCorners=2,i;
321   PetscInt       numCells,numVertices,network;
322   PetscInt       *cells;
323   PetscReal      *coords;
324 
325   PetscFunctionBegin;
326   PetscCallMPI(MPI_Comm_rank(comm, &rank));
327   PetscCallMPI(MPI_Comm_size(comm, &size));
328   PetscCheck(size <= 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes",testNum);
329 
330   numCells = 3;
331   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
332   PetscCheck(numCells >= 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must >=3",numCells);
333 
334   if (size == 1) {
335     numVertices = numCells + 1;
336     PetscCall(PetscMalloc2(2*numCells,&cells,2*numVertices,&coords));
337     for (i=0; i<numCells; i++) {
338       cells[2*i] = i; cells[2*i+1] = i + 1;
339       coords[2*i] = i; coords[2*i+1] = i + 1;
340     }
341 
342     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
343     PetscCall(PetscFree2(cells,coords));
344     PetscFunctionReturn(0);
345   }
346 
347   network = 0;
348   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
349   if (network == 0) {
350     switch (rank) {
351     case 0:
352     {
353       numCells    = 2;
354       numVertices = numCells;
355       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
356       cells[0] = 0; cells[1] = 1;
357       cells[2] = 1; cells[3] = 2;
358       coords[0] = 0.; coords[1] = 1.;
359       coords[2] = 1.; coords[3] = 2.;
360     }
361     break;
362     case 1:
363     {
364       numCells    -= 2;
365       numVertices = numCells + 1;
366       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
367       for (i=0; i<numCells; i++) {
368         cells[2*i] = 2+i; cells[2*i+1] = 2 + i + 1;
369         coords[2*i] = 2+i; coords[2*i+1] = 2 + i + 1;
370       }
371     }
372     break;
373     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
374     }
375   } else { /* network_case = 1 */
376     /* ----------------------- */
377     switch (rank) {
378     case 0:
379     {
380       numCells    = 2;
381       numVertices = 3;
382       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
383       cells[0] = 0; cells[1] = 3;
384       cells[2] = 3; cells[3] = 1;
385     }
386     break;
387     case 1:
388     {
389       numCells    = 1;
390       numVertices = 1;
391       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
392       cells[0] = 3; cells[1] = 2;
393     }
394     break;
395     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
396     }
397   }
398   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
399   PetscCall(PetscFree2(cells,coords));
400   PetscFunctionReturn(0);
401 }
402 
/* Create the 2D triangle test meshes described in the file header:
   testNum 0: two triangles sharing an edge (2 ranks);
   testNum 1 and 2: four triangles across 3 ranks (differing vertex numbering).
   Each rank supplies its local cells/vertices to
   DMPlexCreateFromCellListParallelPetsc and tags boundary points in the
   "marker" label. Cell connectivity uses GLOBAL vertex numbers; coords are
   for the locally owned vertices only. */
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    /* Two triangles sharing a face (see header diagram) */
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {-0.5, 0.5, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};  /* (point, value) pairs */

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 3, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.5, 0.5};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};
        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    /* Four triangles on 3 ranks (header "Test 1") */
    PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 2, 3};
        PetscReal      coords[4] = {0.5, 0.5, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {2, 4, 3, 2, 1, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        /* NOTE(review): 5 (point,value) pairs are declared but only the first
           3 are applied by the loop below — matches upstream; confirm intent */
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    /* Same four-triangle mesh, different vertex numbering (header "Test 2") */
    PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 2, 0};
        PetscReal      coords[4] = {0.5, 0.5, 0.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 0, 3};
        PetscReal      coords[4] = {0.0, 0.0, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {0, 4, 3, 0, 2, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        /* NOTE(review): only the first 3 of 5 pairs applied — see case 1 above */
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
522 
/* Create the 3D tetrahedral test mesh (two tets sharing a face, 2 ranks; see
   header diagram). If -rotate_interface_* was given (user->testOrientIF),
   additionally rotate the shared interface face on each rank by the requested
   orientation and verify DMPlexOrientInterface_Internal() repairs it. */
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 4;
        const PetscInt cells[4]  = {0, 2, 1, 3};
        PetscReal      coords[6] = {0.0, 0.0, -0.5,  0.0, -0.5, 0.0};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};  /* (point, value) pairs */

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {1, 2, 4, 3};
        PetscReal      coords[9] = {1.0, 0.0, 0.0,  0.0, 0.5, 0.0,  0.0, 0.0, 0.5};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    /* ifp[rank] is the local DAG point of the shared interface face on each
       rank (presumably fixed by the interpolation above — TODO confirm) */
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject) *dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    /* this must reconcile the deliberately mismatched face orientations */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
576 
577 static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
578 {
579   PetscInt       testNum = user->testNum, p;
580   PetscMPIInt    rank, size;
581 
582   PetscFunctionBegin;
583   PetscCallMPI(MPI_Comm_rank(comm, &rank));
584   PetscCallMPI(MPI_Comm_size(comm, &size));
585   switch (testNum) {
586   case 0:
587     PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
588     switch (rank) {
589       case 0:
590       {
591         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
592         const PetscInt cells[4]  = {0, 1, 2, 3};
593         PetscReal      coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
594         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
595 
596         PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
597         for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
598       }
599       break;
600       case 1:
601       {
602         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
603         const PetscInt cells[4]  = {1, 4, 5, 2};
604         PetscReal      coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
605         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
606 
607         PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
608         for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
609       }
610       break;
611       default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
612     }
613     break;
614   default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
615   }
616   PetscFunctionReturn(0);
617 }
618 
/* Create the 3D hexahedral test mesh: two hexes sharing a face, one cell per
   rank (2 ranks; see header diagram). Cell connectivity uses GLOBAL vertex
   numbers; each rank supplies coordinates for its 6 local vertices. */
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6*3] = {-0.5,0.0,0.0, 0.0,0.0,0.0, 0.0,1.0,0.0, -0.5,1.0,0.0, -0.5,0.0,1.0, 0.0,0.0,1.0};
      /* NOTE(review): 8 (point,value) pairs declared, but the loop below
         applies only the first 4 — matches upstream; confirm intent */
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    case 1:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6*3] = {0.0,1.0,1.0, -0.5,1.0,1.0, 0.5,0.0,0.0, 0.5,1.0,0.0, 0.5,0.0,1.0,  0.5,1.0,1.0};
      /* NOTE(review): only the first 4 of 8 pairs applied — see rank 0 above */
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
660 
661 static PetscErrorCode CustomView(DM dm, PetscViewer v)
662 {
663   DMPlexInterpolatedFlag interpolated;
664   PetscBool              distributed;
665 
666   PetscFunctionBegin;
667   PetscCall(DMPlexIsDistributed(dm, &distributed));
668   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
669   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
670   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
671   PetscFunctionReturn(0);
672 }
673 
/* Load a mesh from user->filename into *dm. If the loader produced a
   distributed mesh and the heavy PointSF test is enabled, also load a serial
   reference copy into *serialDM (forcing sequential loading via an option);
   otherwise *serialDM is left NULL. Also updates user->dim from the file. */
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char    *filename       = user->filename;
  PetscBool      testHeavy      = user->testHeavy;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      distributed    = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  /* Temporarily disable interface orientation during interpolated load; the
     matching re-enable follows the load below */
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    /* Force a non-distributed second load to serve as the reference mesh */
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed,comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  /* The mesh dimension comes from the file, overriding any -dim option */
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
699 
700 static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
701 {
702   PetscPartitioner part;
703   PortableBoundary boundary     = NULL;
704   DM             serialDM       = NULL;
705   PetscBool      cellSimplex    = user->cellSimplex;
706   PetscBool      useGenerator   = user->useGenerator;
707   PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
708   PetscBool      interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
709   PetscBool      interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
710   PetscBool      testHeavy      = user->testHeavy;
711   PetscMPIInt    rank;
712 
713   PetscFunctionBegin;
714   PetscCallMPI(MPI_Comm_rank(comm, &rank));
715   if (user->filename[0]) {
716     PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
717   } else if (useGenerator) {
718     PetscCall(PetscLogStagePush(stage[0]));
719     PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
720     PetscCall(PetscLogStagePop());
721   } else {
722     PetscCall(PetscLogStagePush(stage[0]));
723     switch (user->dim) {
724     case 1:
725       PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
726       break;
727     case 2:
728       if (cellSimplex) {
729         PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
730       } else {
731         PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
732       }
733       break;
734     case 3:
735       if (cellSimplex) {
736         PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
737       } else {
738         PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
739       }
740       break;
741     default:
742       SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
743     }
744     PetscCall(PetscLogStagePop());
745   }
746   PetscCheck(user->ncoords % user->dim == 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisable by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
747   PetscCall(PetscObjectSetName((PetscObject) *dm, "Original Mesh"));
748   PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
749 
750   if (interpSerial) {
751     DM idm;
752 
753     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
754     PetscCall(PetscLogStagePush(stage[2]));
755     PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
756     PetscCall(PetscLogStagePop());
757     if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
758     PetscCall(DMDestroy(dm));
759     *dm = idm;
760     PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh"));
761     PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
762   }
763 
764   /* Set partitioner options */
765   PetscCall(DMPlexGetPartitioner(*dm, &part));
766   if (part) {
767     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
768     PetscCall(PetscPartitionerSetFromOptions(part));
769   }
770 
771   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
772   if (testHeavy) {
773     PetscBool distributed;
774 
775     PetscCall(DMPlexIsDistributed(*dm, &distributed));
776     if (!serialDM && !distributed) {
777       serialDM = *dm;
778       PetscCall(PetscObjectReference((PetscObject)*dm));
779     }
780     if (serialDM) {
781       PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
782     }
783     if (boundary) {
784       /* check DM which has been created in parallel and already interpolated */
785       PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
786     }
787     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
788     PetscCall(DMPlexOrientInterface_Internal(*dm));
789   }
790   if (user->distribute) {
791     DM               pdm = NULL;
792 
793     /* Redistribute mesh over processes using that partitioner */
794     PetscCall(PetscLogStagePush(stage[1]));
795     PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
796     PetscCall(PetscLogStagePop());
797     if (pdm) {
798       PetscCall(DMDestroy(dm));
799       *dm  = pdm;
800       PetscCall(PetscObjectSetName((PetscObject) *dm, "Redistributed Mesh"));
801       PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
802     }
803 
804     if (interpParallel) {
805       DM idm;
806 
807       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
808       PetscCall(PetscLogStagePush(stage[2]));
809       PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
810       PetscCall(PetscLogStagePop());
811       if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
812       PetscCall(DMDestroy(dm));
813       *dm = idm;
814       PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh"));
815       PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
816     }
817   }
818   if (testHeavy) {
819     if (boundary) {
820       PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
821     }
822     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
823     PetscCall(DMPlexOrientInterface_Internal(*dm));
824   }
825 
826   PetscCall(PetscObjectSetName((PetscObject) *dm, "Parallel Mesh"));
827   PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
828   PetscCall(DMSetFromOptions(*dm));
829   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
830 
831   if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
832   PetscCall(DMDestroy(&serialDM));
833   PetscCall(PortableBoundaryDestroy(&boundary));
834   PetscFunctionReturn(0);
835 }
836 
837 #define ps2d(number) ((double) PetscRealPart(number))
838 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
839 {
840   PetscFunctionBegin;
841   PetscCheck(dim <= 3,PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
842   if (tol >= 1e-3) {
843     switch (dim) {
844       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.3f)",ps2d(coords[0])));
845       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1])));
846       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
847     }
848   } else {
849     switch (dim) {
850       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.6f)",ps2d(coords[0])));
851       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1])));
852       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
853     }
854   }
855   PetscFunctionReturn(0);
856 }
857 
/*
  ViewVerticesFromCoords - For each coordinate tuple in coordsVec, look up the
  matching local mesh point with DMPlexFindVertices() (within tolerance tol)
  and print "[rank] (coords) --> points[i] = <point>" synchronously on viewer.
*/
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt       dim, i, npoints;
  IS             pointsIS;
  const PetscInt *points;
  const PetscScalar *coords;
  char           coordstr[128];
  MPI_Comm       comm;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  /* locate the mesh points matching the given coordinates (within tol) */
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i=0; i < npoints; i++) {
    /* coordinates of query i start at offset i*dim */
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol));
    /* separator between successive queries, emitted once (from rank 0 only) */
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}
889 
/*
  TestExpandPoints - Exercise DMPlexGetConeRecursive(): expand the user-given
  points (user->pointsToExpand) down through all depth levels, view the
  resulting sections/ISs per rank, and verify the reverse operation
  (DMPlexExpandedConesToFaces_Private) reproduces each level from the one below.
*/
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS                is;
  PetscSection      *sects;
  IS                *iss;
  PetscInt          d,depth;
  PetscMPIInt       rank;
  PetscViewer       viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  if (user->testExpandPointsEmpty && rank == 0) {
    /* deliberately pass an empty IS on rank 0 to test the empty-input path */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank));
  for (d=depth-1; d>=0; d--) { /* from the highest stratum down to vertices */
    IS          checkIS;
    PetscBool   flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n",d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth-1) {
      /* joining the cones at level d must reproduce the points at level d+1 */
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d+1], &flg));
      PetscCheck(flg,PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}
930 
/*
  DMPlexExpandedConesToFaces_Private - Invert one level of cone expansion.

  For each point q in section's chart, the dofs of q in is form a cone; the
  output newis[q] is the unique point covering that cone (its join).
  Sentinels: -1 if any cone entry is negative (propagates missing points),
  -2 if the join is empty; a cone of size 1 passes its single point through.
*/
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt          n,n1,ncone,numCoveredPoints,o,p,q,start,end;
  const PetscInt    *coveredPoints;
  const PetscInt    *arr, *cone;
  PetscInt          *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  /* the IS must be exactly the flattened storage described by section */
  PetscCheck(n == n1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end-start, &newarr));
  for (q=start; q<end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* trivial cone: the point is its own "join" */
      numCoveredPoints = 1;
      p = cone[0];
    } else {
      PetscInt i;
      p = PETSC_MAX_INT;
      /* any negative cone entry marks the whole cone as invalid (-1) */
      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = -1; break;}
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %" PetscInt_FMT,q);
        if (numCoveredPoints) p = coveredPoints[0];
        else                  p = -2; /* no covering point found */
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q-start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  /* the new IS takes ownership of newarr */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
970 
971 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
972 {
973   PetscInt          d;
974   IS                is,newis;
975 
976   PetscFunctionBegin;
977   is = boundary_expanded_is;
978   PetscCall(PetscObjectReference((PetscObject)is));
979   for (d = 0; d < depth-1; ++d) {
980     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
981     PetscCall(ISDestroy(&is));
982     is = newis;
983   }
984   *boundary_is = is;
985   PetscFunctionReturn(0);
986 }
987 
988 #define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; }
989 
/*
  DMLabelViewFromOptionsOnComm_Private - View label if optionname was given,
  creating the viewer on an explicitly supplied communicator (the label itself
  is sequential here).  The static 'incall' flag guards against recursion;
  CHKERRQI merely clears that flag on error and continues, so viewing
  failures are deliberately non-fatal.
*/
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE; /* recursion guard */
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall,PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg));
  if (flg) {
    CHKERRQI(incall,PetscViewerPushFormat(viewer,format));
    CHKERRQI(incall,DMLabelView(label, viewer));
    CHKERRQI(incall,PetscViewerPopFormat(viewer));
    CHKERRQI(incall,PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1010 
1011 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1012 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1013 {
1014   IS                tmpis;
1015 
1016   PetscFunctionBegin;
1017   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1018   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1019   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1020   PetscCall(ISDestroy(&tmpis));
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 /* currently only for simple PetscSection without fields or constraints */
1025 static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1026 {
1027   PetscSection      sec;
1028   PetscInt          chart[2], p;
1029   PetscInt          *dofarr;
1030   PetscMPIInt       rank;
1031 
1032   PetscFunctionBegin;
1033   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1034   if (rank == rootrank) {
1035     PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1036   }
1037   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1038   PetscCall(PetscMalloc1(chart[1]-chart[0], &dofarr));
1039   if (rank == rootrank) {
1040     for (p = chart[0]; p < chart[1]; p++) {
1041       PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]));
1042     }
1043   }
1044   PetscCallMPI(MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm));
1045   PetscCall(PetscSectionCreate(comm, &sec));
1046   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1047   for (p = chart[0]; p < chart[1]; p++) {
1048     PetscCall(PetscSectionSetDof(sec, p, dofarr[p-chart[0]]));
1049   }
1050   PetscCall(PetscSectionSetUp(sec));
1051   PetscCall(PetscFree(dofarr));
1052   *secout = sec;
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1057 {
1058   IS                  faces_expanded_is;
1059 
1060   PetscFunctionBegin;
1061   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1062   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1063   PetscCall(ISDestroy(&faces_expanded_is));
1064   PetscFunctionReturn(0);
1065 }
1066 
1067 /* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
/*
  DMPlexSetOrientInterface_Private - Toggle the
  -dm_plex_interpolate_orient_interfaces option (honoring dm's options prefix
  and database) so DMPlexInterpolate() skips or performs interface
  orientation.  The static wasSetTrue remembers an explicit user "true" while
  disabled so it can be restored on re-enable.
*/
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions      options = NULL;
  const char        *prefix = NULL;
  const char        opt[] = "-dm_plex_interpolate_orient_interfaces";
  char              prefix_opt[512];
  PetscBool         flg, set;
  static PetscBool  wasSetTrue = PETSC_FALSE; /* survives across disable/enable cycles */

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* build "-<prefix>dm_plex_interpolate_orient_interfaces" */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* remember if the user had explicitly set it to true, then force it off */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      /* restore the user's explicit "true" */
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check: the option must now read back consistent with 'enable' */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable,PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1104 
1105 /* get coordinate description of the whole-domain boundary */
/*
  DMPlexGetExpandedBoundary_Private - Build a PortableBoundary: a
  partitioning-independent description of the whole-domain boundary as
  vertex coordinates plus the PetscSections needed to fold them back into
  faces.  The input DM must be serial (all points on one rank); the result is
  replicated on every rank of dm's communicator.
*/
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                     *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0; /* NOTE(review): assumes the serial DM's points live on rank 0 -- confirm */
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg,comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    /* expand boundary faces down to vertices, then take their coordinates */
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      /* folding the coordinates back must reproduce the original boundary faces */
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg,PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks contribute no coordinates */
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec         tmp;
    VecScatter  sc;
    IS          xis;
    PetscInt    n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n*size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* bnd0->depth is only meaningful on the root (PetscNew zeroed it elsewhere);
     the broadcast makes all ranks agree */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d=0; d<bnd->depth; d++) {
    PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
  }

  if (rank == rootrank) {
    PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  }
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1210 
1211 /* get faces of inter-partition interface */
1212 static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1213 {
1214   MPI_Comm               comm;
1215   DMLabel                label;
1216   IS                     part_boundary_faces_is;
1217   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1218   PetscInt               value = 1;
1219   DMPlexInterpolatedFlag intp;
1220 
1221   PetscFunctionBegin;
1222   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1223   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1224   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1225 
1226   /* get ipdm partition boundary (partBoundary) */
1227   PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1228   PetscCall(DMAddLabel(ipdm, label));
1229   PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1230   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1231   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1232   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1233   PetscCall(DMLabelDestroy(&label));
1234 
1235   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1236   PetscCall(ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is));
1237   PetscCall(ISDestroy(&part_boundary_faces_is));
1238   PetscFunctionReturn(0);
1239 }
1240 
1241 /* compute inter-partition interface including edges and vertices */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel                label;
  PetscInt               value = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* put the interface faces into a label, then complete it downward so the
     label also contains the edges and vertices in their closures */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  PetscCall(DMPlexLabelComplete(ipdm, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  /* the completed stratum is the full interface point set */
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(0);
}
1268 
1269 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1270 {
1271   PetscInt        n;
1272   const PetscInt  *arr;
1273 
1274   PetscFunctionBegin;
1275   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1276   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1281 {
1282   PetscInt        n;
1283   const PetscInt  *rootdegree;
1284   PetscInt        *arr;
1285 
1286   PetscFunctionBegin;
1287   PetscCall(PetscSFSetUp(sf));
1288   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1289   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1290   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1291   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1296 {
1297   IS pointSF_out_is, pointSF_in_is;
1298 
1299   PetscFunctionBegin;
1300   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1301   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1302   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1303   PetscCall(ISDestroy(&pointSF_out_is));
1304   PetscCall(ISDestroy(&pointSF_in_is));
1305   PetscFunctionReturn(0);
1306 }
1307 
1308 #define CHKERRMY(ierr) PetscCheck(!ierr,PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
1309 
/*
  ViewPointsWithType_Internal - Print each point of pointsIS with its type
  (vertex with coordinates, or edge/face/cell by depth) and recurse into its
  cone, indenting one tab per recursion level.
*/
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar     *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt  *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i=0; i<npoints; i++) {
    p = points[i];
    /* the depth label classifies the point: 0 = vertex, 1 = edge, ... */
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      PetscInt        n, o;
      char            coordstr[128];

      /* vertex: print its coordinates from the coordinate section */
      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char            entityType[16];

      switch (depth) {
        case 1: PetscCall(PetscStrcpy(entityType, "edge")); break;
        case 2: PetscCall(PetscStrcpy(entityType, "face")); break;
        case 3: PetscCall(PetscStrcpy(entityType, "cell")); break;
        default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in 1D/2D the top-depth entities are the cells; annotate them */
      if (depth == dim && dim < 3) {
        PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      const PetscInt *cone;
      IS             coneIS;

      /* recurse into the cone so the whole closure gets printed */
      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1369 
1370 static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1371 {
1372   PetscBool       flg;
1373   PetscInt        npoints;
1374   PetscMPIInt     rank;
1375 
1376   PetscFunctionBegin;
1377   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1378   PetscCheck(flg,PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1379   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1380   PetscCall(PetscViewerASCIIPushSynchronized(v));
1381   PetscCall(ISGetLocalSize(points, &npoints));
1382   if (npoints) {
1383     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1384     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1385   }
1386   PetscCall(PetscViewerFlush(v));
1387   PetscCall(PetscViewerASCIIPopSynchronized(v));
1388   PetscFunctionReturn(0);
1389 }
1390 
/*
  DMPlexComparePointSFWithInterface_Private - Check that the points covered by
  ipdm's PointSF (union of inward and outward points) are exactly interface_is.
  On mismatch, print the missing/extra points with their types to stderr and
  raise PETSC_ERR_PLIB.
*/
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF         pointsf;
  IS              pointsf_is;
  PetscBool       flg;
  MPI_Comm        comm;
  PetscMPIInt     size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    /* no PointSF: only valid if the expected interface is empty */
    PetscInt N=0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N,comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  /* all ranks must agree before deciding; a mismatch anywhere is an error */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&flg,1,MPIU_BOOL,MPI_LAND,comm));
  if (!flg) {
    IS pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    /* CHKERRMY converts any failure while reporting into a generic error */
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1437 
/* remove faces & edges from the points IS, leave just vertices */
1439 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1440 {
1441   PetscInt        vStart, vEnd;
1442   MPI_Comm        comm;
1443 
1444   PetscFunctionBegin;
1445   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1446   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1447   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1448   PetscFunctionReturn(0);
1449 }
1450 
1451 /*
1452   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1453 
1454   Collective
1455 
1456   Input Parameters:
1457 . dm - The DMPlex object
1458 
1459   Notes:
1460   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1461   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1462   This is mainly intended for debugging/testing purposes.
1463 
1464   Algorithm:
1465   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1466   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1467   3. the mesh is distributed or loaded in parallel
1468   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1469   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1470   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1471   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1472 
1473   Level: developer
1474 
1475 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1476 */
1477 static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1478 {
1479   DM                     ipdm=NULL;
1480   IS                     boundary_faces_is, interface_faces_is, interface_is;
1481   DMPlexInterpolatedFlag intp;
1482   MPI_Comm               comm;
1483 
1484   PetscFunctionBegin;
1485   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1486 
1487   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1488   if (intp == DMPLEX_INTERPOLATED_FULL) {
1489     ipdm = dm;
1490   } else {
1491     /* create temporary interpolated DM if input DM is not interpolated */
1492     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1493     PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1494     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1495   }
1496   PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1497 
1498   /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1499   PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1500   /* get inter-partition interface faces (interface_faces_is)*/
1501   PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1502   /* compute inter-partition interface including edges and vertices (interface_is) */
1503   PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1504   /* destroy immediate ISs */
1505   PetscCall(ISDestroy(&boundary_faces_is));
1506   PetscCall(ISDestroy(&interface_faces_is));
1507 
1508   /* for uninterpolated case, keep just vertices in interface */
1509   if (!intp) {
1510     PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1511     PetscCall(DMDestroy(&ipdm));
1512   }
1513 
1514   /* compare PointSF with the boundary reconstructed from coordinates */
1515   PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1516   PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1517   PetscCall(ISDestroy(&interface_is));
1518   PetscFunctionReturn(0);
1519 }
1520 
1521 int main(int argc, char **argv)
1522 {
1523   DM             dm;
1524   AppCtx         user;
1525 
1526   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1527   PetscCall(PetscLogStageRegister("create",&stage[0]));
1528   PetscCall(PetscLogStageRegister("distribute",&stage[1]));
1529   PetscCall(PetscLogStageRegister("interpolate",&stage[2]));
1530   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1531   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1532   if (user.nPointsToExpand) {
1533     PetscCall(TestExpandPoints(dm, &user));
1534   }
1535   if (user.ncoords) {
1536     Vec coords;
1537 
1538     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1539     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1540     PetscCall(VecDestroy(&coords));
1541   }
1542   PetscCall(DMDestroy(&dm));
1543   PetscCall(PetscFinalize());
1544   return 0;
1545 }
1546 
1547 /*TEST
1548 
1549   testset:
1550     nsize: 2
1551     args: -dm_view ascii::ascii_info_detail
1552     args: -dm_plex_check_all
1553     test:
1554       suffix: 1_tri_dist0
1555       args: -distribute 0 -interpolate {{none create}separate output}
1556     test:
1557       suffix: 1_tri_dist1
1558       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1559     test:
1560       suffix: 1_quad_dist0
1561       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1562     test:
1563       suffix: 1_quad_dist1
1564       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1565     test:
1566       suffix: 1_1d_dist1
1567       args: -dim 1 -distribute 1
1568 
1569   testset:
1570     nsize: 3
1571     args: -testnum 1 -interpolate create
1572     args: -dm_plex_check_all
1573     test:
1574       suffix: 2
1575       args: -dm_view ascii::ascii_info_detail
1576     test:
1577       suffix: 2a
1578       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1579     test:
1580       suffix: 2b
1581       args: -test_expand_points 0,1,2,5,6
1582     test:
1583       suffix: 2c
1584       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1585 
1586   testset:
    # the same as testset 1 but for 3D
1588     nsize: 2
1589     args: -dim 3 -dm_view ascii::ascii_info_detail
1590     args: -dm_plex_check_all
1591     test:
1592       suffix: 4_tet_dist0
1593       args: -distribute 0 -interpolate {{none create}separate output}
1594     test:
1595       suffix: 4_tet_dist1
1596       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1597     test:
1598       suffix: 4_hex_dist0
1599       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1600     test:
1601       suffix: 4_hex_dist1
1602       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1603 
1604   test:
1605     # the same as 4_tet_dist0 but test different initial orientations
1606     suffix: 4_tet_test_orient
1607     nsize: 2
1608     args: -dim 3 -distribute 0
1609     args: -dm_plex_check_all
1610     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1611     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1612 
1613   testset:
1614     requires: exodusii
1615     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1616     args: -dm_view ascii::ascii_info_detail
1617     args: -dm_plex_check_all
1618     args: -custom_view
1619     test:
1620       suffix: 5_seq
1621       nsize: 1
1622       args: -distribute 0 -interpolate {{none create}separate output}
1623     test:
1624       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1625       suffix: 5_dist0
1626       nsize: 2
1627       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1628     test:
1629       suffix: 5_dist1
1630       nsize: 2
1631       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1632 
1633   testset:
1634     nsize: {{1 2 4}}
1635     args: -use_generator
1636     args: -dm_plex_check_all
1637     args: -distribute -interpolate none
1638     test:
1639       suffix: 6_tri
1640       requires: triangle
1641       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1642     test:
1643       suffix: 6_quad
1644       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1645     test:
1646       suffix: 6_tet
1647       requires: ctetgen
1648       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1649     test:
1650       suffix: 6_hex
1651       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1652   testset:
1653     nsize: {{1 2 4}}
1654     args: -use_generator
1655     args: -dm_plex_check_all
1656     args: -distribute -interpolate create
1657     test:
1658       suffix: 6_int_tri
1659       requires: triangle
1660       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1661     test:
1662       suffix: 6_int_quad
1663       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1664     test:
1665       suffix: 6_int_tet
1666       requires: ctetgen
1667       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1668     test:
1669       suffix: 6_int_hex
1670       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1671   testset:
1672     nsize: {{2 4}}
1673     args: -use_generator
1674     args: -dm_plex_check_all
1675     args: -distribute -interpolate after_distribute
1676     test:
1677       suffix: 6_parint_tri
1678       requires: triangle
1679       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1680     test:
1681       suffix: 6_parint_quad
1682       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1683     test:
1684       suffix: 6_parint_tet
1685       requires: ctetgen
1686       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1687     test:
1688       suffix: 6_parint_hex
1689       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1690 
1691   testset: # 7 EXODUS
1692     requires: exodusii
1693     args: -dm_plex_check_all
1694     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1695     args: -distribute
1696     test: # seq load, simple partitioner
1697       suffix: 7_exo
1698       nsize: {{1 2 4 5}}
1699       args: -interpolate none
1700     test: # seq load, seq interpolation, simple partitioner
1701       suffix: 7_exo_int_simple
1702       nsize: {{1 2 4 5}}
1703       args: -interpolate create
1704     test: # seq load, seq interpolation, metis partitioner
1705       suffix: 7_exo_int_metis
1706       requires: parmetis
1707       nsize: {{2 4 5}}
1708       args: -interpolate create
1709       args: -petscpartitioner_type parmetis
1710     test: # seq load, simple partitioner, par interpolation
1711       suffix: 7_exo_simple_int
1712       nsize: {{2 4 5}}
1713       args: -interpolate after_distribute
1714     test: # seq load, metis partitioner, par interpolation
1715       suffix: 7_exo_metis_int
1716       requires: parmetis
1717       nsize: {{2 4 5}}
1718       args: -interpolate after_distribute
1719       args: -petscpartitioner_type parmetis
1720 
  testset: # 7 HDF5 SEQUENTIAL LOAD
1722     requires: hdf5 !complex
1723     args: -dm_plex_check_all
1724     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1725     args: -dm_plex_hdf5_force_sequential
1726     args: -distribute
1727     test: # seq load, simple partitioner
1728       suffix: 7_seq_hdf5_simple
1729       nsize: {{1 2 4 5}}
1730       args: -interpolate none
1731     test: # seq load, seq interpolation, simple partitioner
1732       suffix: 7_seq_hdf5_int_simple
1733       nsize: {{1 2 4 5}}
1734       args: -interpolate after_create
1735     test: # seq load, seq interpolation, metis partitioner
1736       nsize: {{2 4 5}}
1737       suffix: 7_seq_hdf5_int_metis
1738       requires: parmetis
1739       args: -interpolate after_create
1740       args: -petscpartitioner_type parmetis
1741     test: # seq load, simple partitioner, par interpolation
1742       suffix: 7_seq_hdf5_simple_int
1743       nsize: {{2 4 5}}
1744       args: -interpolate after_distribute
1745     test: # seq load, metis partitioner, par interpolation
1746       nsize: {{2 4 5}}
1747       suffix: 7_seq_hdf5_metis_int
1748       requires: parmetis
1749       args: -interpolate after_distribute
1750       args: -petscpartitioner_type parmetis
1751 
1752   testset: # 7 HDF5 PARALLEL LOAD
1753     requires: hdf5 !complex
1754     nsize: {{2 4 5}}
1755     args: -dm_plex_check_all
1756     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1757     test: # par load
1758       suffix: 7_par_hdf5
1759       args: -interpolate none
1760     test: # par load, par interpolation
1761       suffix: 7_par_hdf5_int
1762       args: -interpolate after_create
1763     test: # par load, parmetis repartitioner
1764       TODO: Parallel partitioning of uninterpolated meshes not supported
1765       suffix: 7_par_hdf5_parmetis
1766       requires: parmetis
1767       args: -distribute -petscpartitioner_type parmetis
1768       args: -interpolate none
1769     test: # par load, par interpolation, parmetis repartitioner
1770       suffix: 7_par_hdf5_int_parmetis
1771       requires: parmetis
1772       args: -distribute -petscpartitioner_type parmetis
1773       args: -interpolate after_create
1774     test: # par load, parmetis partitioner, par interpolation
1775       TODO: Parallel partitioning of uninterpolated meshes not supported
1776       suffix: 7_par_hdf5_parmetis_int
1777       requires: parmetis
1778       args: -distribute -petscpartitioner_type parmetis
1779       args: -interpolate after_distribute
1780 
1781     test:
1782       suffix: 7_hdf5_hierarch
1783       requires: hdf5 ptscotch !complex
1784       nsize: {{2 3 4}separate output}
1785       args: -distribute
1786       args: -interpolate after_create
1787       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1788       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1789       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1790 
1791   test:
1792     suffix: 8
1793     requires: hdf5 !complex
1794     nsize: 4
1795     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1796     args: -distribute 0 -interpolate after_create
1797     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1798     args: -dm_plex_check_all
1799     args: -custom_view
1800 
  testset: # 9 HDF5 SEQUENTIAL LOAD
1802     requires: hdf5 !complex datafilespath
1803     args: -dm_plex_check_all
1804     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1805     args: -dm_plex_hdf5_force_sequential
1806     args: -distribute
1807     test: # seq load, simple partitioner
1808       suffix: 9_seq_hdf5_simple
1809       nsize: {{1 2 4 5}}
1810       args: -interpolate none
1811     test: # seq load, seq interpolation, simple partitioner
1812       suffix: 9_seq_hdf5_int_simple
1813       nsize: {{1 2 4 5}}
1814       args: -interpolate after_create
1815     test: # seq load, seq interpolation, metis partitioner
1816       nsize: {{2 4 5}}
1817       suffix: 9_seq_hdf5_int_metis
1818       requires: parmetis
1819       args: -interpolate after_create
1820       args: -petscpartitioner_type parmetis
1821     test: # seq load, simple partitioner, par interpolation
1822       suffix: 9_seq_hdf5_simple_int
1823       nsize: {{2 4 5}}
1824       args: -interpolate after_distribute
1825     test: # seq load, simple partitioner, par interpolation
1826       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1827       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1828       # We can then provide an intentionally broken mesh instead.
1829       TODO: This test is broken because PointSF is fixed.
1830       suffix: 9_seq_hdf5_simple_int_err
1831       nsize: 4
1832       args: -interpolate after_distribute
1833       filter: sed -e "/PETSC ERROR/,$$d"
1834     test: # seq load, metis partitioner, par interpolation
1835       nsize: {{2 4 5}}
1836       suffix: 9_seq_hdf5_metis_int
1837       requires: parmetis
1838       args: -interpolate after_distribute
1839       args: -petscpartitioner_type parmetis
1840 
1841   testset: # 9 HDF5 PARALLEL LOAD
1842     requires: hdf5 !complex datafilespath
1843     nsize: {{2 4 5}}
1844     args: -dm_plex_check_all
1845     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1846     test: # par load
1847       suffix: 9_par_hdf5
1848       args: -interpolate none
1849     test: # par load, par interpolation
1850       suffix: 9_par_hdf5_int
1851       args: -interpolate after_create
1852     test: # par load, parmetis repartitioner
1853       TODO: Parallel partitioning of uninterpolated meshes not supported
1854       suffix: 9_par_hdf5_parmetis
1855       requires: parmetis
1856       args: -distribute -petscpartitioner_type parmetis
1857       args: -interpolate none
1858     test: # par load, par interpolation, parmetis repartitioner
1859       suffix: 9_par_hdf5_int_parmetis
1860       requires: parmetis
1861       args: -distribute -petscpartitioner_type parmetis
1862       args: -interpolate after_create
1863     test: # par load, parmetis partitioner, par interpolation
1864       TODO: Parallel partitioning of uninterpolated meshes not supported
1865       suffix: 9_par_hdf5_parmetis_int
1866       requires: parmetis
1867       args: -distribute -petscpartitioner_type parmetis
1868       args: -interpolate after_distribute
1869 
1870   testset: # 10 HDF5 PARALLEL LOAD
1871     requires: hdf5 !complex datafilespath
1872     nsize: {{2 4 7}}
1873     args: -dm_plex_check_all
1874     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1875     test: # par load, par interpolation
1876       suffix: 10_par_hdf5_int
1877       args: -interpolate after_create
1878 TEST*/
1879