xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision dbbe0bcd3f3a8fbab5a45420dc06f8387e5764c6)
/* Help text printed by PETSc for -help */
static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
    mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
/* When to topologically interpolate the mesh:
   NONE - never, CREATE - during mesh creation, AFTER_CREATE - after serial creation,
   AFTER_DISTRIBUTE - after DMPlexDistribute() */
typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType;
190 
/* Command-line configurable state shared by all routines of this test */
typedef struct {
  PetscInt   debug;                        /* The debugging level */
  PetscInt   testNum;                      /* Indicates the mesh to create */
  PetscInt   dim;                          /* The topological mesh dimension */
  PetscBool  cellSimplex;                  /* Use simplices or hexes */
  PetscBool  distribute;                   /* Distribute the mesh */
  InterpType interpolate;                  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool  useGenerator;                 /* Construct mesh with a mesh generator */
  PetscBool  testOrientIF;                 /* Test for different original interface orientations */
  PetscBool  testHeavy;                    /* Run the heavy PointSF test */
  PetscBool  customView;                   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt   ornt[2];                      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt   faces[3];                     /* Number of faces per dimension for generator */
  PetscScalar coords[128];                 /* Coordinates given to -view_vertices_from_coords */
  PetscReal  coordsTol;                    /* Tolerance for matching coords[] (PETSC_DEFAULT unless overridden) */
  PetscInt   ncoords;                      /* Number of valid entries in coords[] */
  PetscInt   pointsToExpand[128];          /* DAG points given to -test_expand_points */
  PetscInt   nPointsToExpand;              /* Number of valid entries in pointsToExpand[] */
  PetscBool  testExpandPointsEmpty;        /* For -test_expand_points, rank 0 gets an empty input array */
  char       filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
212 
/* Rank-independent ("portable") description of a mesh boundary, built by
   DMPlexGetExpandedBoundary_Private() and consumed by DMPlexCheckPointSFHeavy(). */
struct _n_PortableBoundary {
  Vec coordinates;        /* coordinates of the boundary points */
  PetscInt depth;         /* number of entries in sections[] (one per depth level) */
  PetscSection *sections; /* per-depth sections describing the expanded boundary cones */
};
typedef struct _n_PortableBoundary * PortableBoundary;
219 
#if defined(PETSC_USE_LOG)
/* Log stages used below: stage[0] = mesh creation, stage[2] = interpolation;
   stage[1] presumably covers distribution — its use is outside this excerpt */
static PetscLogStage  stage[3];
#endif
223 
/* Forward declarations of helpers defined later in this file */
static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
static PetscErrorCode DMPlexSetOrientInterface_Private(DM,PetscBool);
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
228 
229 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
230 {
231   PetscInt       d;
232 
233   PetscFunctionBegin;
234   if (!*bnd) PetscFunctionReturn(0);
235   PetscCall(VecDestroy(&(*bnd)->coordinates));
236   for (d=0; d < (*bnd)->depth; d++) {
237     PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
238   }
239   PetscCall(PetscFree((*bnd)->sections));
240   PetscCall(PetscFree(*bnd));
241   PetscFunctionReturn(0);
242 }
243 
/* Parse command-line options into the AppCtx.
   Note the interactions enforced here:
   - -interpolate after_distribute requires -distribute 1
   - the length of -faces overrides/validates -dim
   - -rotate_interface_{0,1} must be given together and force a fixed
     configuration (3D simplex, interpolate during creation, no distribution) */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *interpTypes[4]  = {"none", "create", "after_create", "after_distribute"};
  PetscInt       interp=NONE, dim;
  PetscBool      flg1, flg2;

  PetscFunctionBegin;
  /* Defaults */
  options->debug        = 0;
  options->testNum      = 0;
  options->dim          = 2;
  options->cellSimplex  = PETSC_TRUE;
  options->distribute   = PETSC_FALSE;
  options->interpolate  = NONE;
  options->useGenerator = PETSC_FALSE;
  options->testOrientIF = PETSC_FALSE;
  options->testHeavy    = PETSC_TRUE;
  options->customView   = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]      = 0;
  options->ornt[1]      = 0;
  options->faces[0]     = 2;
  options->faces[1]     = 2;
  options->faces[2]     = 2;
  options->filename[0]  = '\0';
  options->coordsTol    = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType) interp;
  PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE,comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords/nPointsToExpand are in/out: set to array capacity, returned as actual length */
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) {
    PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  }
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3));
  /* -faces may carry 1-3 values; its actual length determines the dimension */
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    PetscCheck(!flg1 || dim == options->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0));
  PetscCheck(flg2 == options->testOrientIF,comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i=0; i<2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
    }
    /* The orientation test only supports the fixed two-tet serial configuration */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
315 
316 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
317 {
318   PetscInt       testNum = user->testNum;
319   PetscMPIInt    rank,size;
320   PetscInt       numCorners=2,i;
321   PetscInt       numCells,numVertices,network;
322   PetscInt       *cells;
323   PetscReal      *coords;
324 
325   PetscFunctionBegin;
326   PetscCallMPI(MPI_Comm_rank(comm, &rank));
327   PetscCallMPI(MPI_Comm_size(comm, &size));
328   PetscCheck(size <= 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes",testNum);
329 
330   numCells = 3;
331   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
332   PetscCheck(numCells >= 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must >=3",numCells);
333 
334   if (size == 1) {
335     numVertices = numCells + 1;
336     PetscCall(PetscMalloc2(2*numCells,&cells,2*numVertices,&coords));
337     for (i=0; i<numCells; i++) {
338       cells[2*i] = i; cells[2*i+1] = i + 1;
339       coords[2*i] = i; coords[2*i+1] = i + 1;
340     }
341 
342     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
343     PetscCall(PetscFree2(cells,coords));
344     PetscFunctionReturn(0);
345   }
346 
347   network = 0;
348   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
349   if (network == 0) {
350     switch (rank) {
351     case 0:
352     {
353       numCells    = 2;
354       numVertices = numCells;
355       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
356       cells[0] = 0; cells[1] = 1;
357       cells[2] = 1; cells[3] = 2;
358       coords[0] = 0.; coords[1] = 1.;
359       coords[2] = 1.; coords[3] = 2.;
360     }
361     break;
362     case 1:
363     {
364       numCells    -= 2;
365       numVertices = numCells + 1;
366       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
367       for (i=0; i<numCells; i++) {
368         cells[2*i] = 2+i; cells[2*i+1] = 2 + i + 1;
369         coords[2*i] = 2+i; coords[2*i+1] = 2 + i + 1;
370       }
371     }
372     break;
373     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
374     }
375   } else { /* network_case = 1 */
376     /* ----------------------- */
377     switch (rank) {
378     case 0:
379     {
380       numCells    = 2;
381       numVertices = 3;
382       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
383       cells[0] = 0; cells[1] = 3;
384       cells[2] = 3; cells[3] = 1;
385     }
386     break;
387     case 1:
388     {
389       numCells    = 1;
390       numVertices = 1;
391       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
392       cells[0] = 3; cells[1] = 2;
393     }
394     break;
395     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
396     }
397   }
398   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
399   PetscCall(PetscFree2(cells,coords));
400   PetscFunctionReturn(0);
401 }
402 
/* Create small predefined triangle meshes (see the "Triangle" section of the
   mesh list at the top of this file).
     testNum 0 - two triangles sharing a face, exactly 2 ranks
     testNum 1 - four triangles, exactly 3 ranks
     testNum 2 - four triangles with a different vertex numbering, exactly 3 ranks
   Each rank supplies its local cells/vertices; global vertex numbering is
   shared across ranks via DMPlexCreateFromCellListParallelPetsc(). */
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {-0.5, 0.5, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 3, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.5, 0.5};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 2, 3};
        PetscReal      coords[4] = {0.5, 0.5, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        /* Rank 2 owns two cells but only one new vertex (global vertex 4) */
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {2, 4, 3, 2, 1, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): markerPoints holds 5 (point,value) pairs but only the
           first 3 are applied here — confirm whether p < 5 was intended */
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    PetscCheck(size == 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 2, 0};
        PetscReal      coords[4] = {0.5, 0.5, 0.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 0, 3};
        PetscReal      coords[4] = {0.0, 0.0, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {0, 4, 3, 0, 2, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): only the first 3 of 5 marker pairs are applied — confirm intent */
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
522 
/* Create two tetrahedra sharing a face, distributed over exactly 2 ranks
   (see the "Tetrahedron" section of the mesh list at the top of this file).
   If user->testOrientIF is set, additionally rotate the shared interface face
   on each rank by user->ornt[rank] and verify the interface is re-oriented
   consistently by DMPlexOrientInterface_Internal(). */
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 4;
        const PetscInt cells[4]  = {0, 2, 1, 3};
        PetscReal      coords[6] = {0.0, 0.0, -0.5,  0.0, -0.5, 0.0};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {1, 2, 4, 3};
        PetscReal      coords[9] = {1.0, 0.0, 0.0,  0.0, 0.5, 0.0,  0.0, 0.0, 0.5};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    /* DAG point of the shared interface face on rank 0 resp. rank 1;
       assumes the interpolated numbering of the two-tet mesh above — confirm */
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject) *dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
576 
577 static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
578 {
579   PetscInt       testNum = user->testNum, p;
580   PetscMPIInt    rank, size;
581 
582   PetscFunctionBegin;
583   PetscCallMPI(MPI_Comm_rank(comm, &rank));
584   PetscCallMPI(MPI_Comm_size(comm, &size));
585   switch (testNum) {
586   case 0:
587     PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
588     switch (rank) {
589       case 0:
590       {
591         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
592         const PetscInt cells[4]  = {0, 1, 2, 3};
593         PetscReal      coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
594         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
595 
596         PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
597         for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
598       }
599       break;
600       case 1:
601       {
602         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
603         const PetscInt cells[4]  = {1, 4, 5, 2};
604         PetscReal      coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
605         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
606 
607         PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
608         for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
609       }
610       break;
611       default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
612     }
613     break;
614   default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
615   }
616   PetscFunctionReturn(0);
617 }
618 
/* Create two hexahedra sharing a face, distributed over exactly 2 ranks
   (see the "Hexahedron" section of the mesh list at the top of this file). */
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheck(size == 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
    switch (rank) {
    case 0:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6*3] = {-0.5,0.0,0.0, 0.0,0.0,0.0, 0.0,1.0,0.0, -0.5,1.0,0.0, -0.5,0.0,1.0, 0.0,0.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): markerPoints holds 8 (point,value) pairs but only the
         first 4 are applied — confirm whether p < 8 was intended */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    case 1:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6*3] = {0.0,1.0,1.0, -0.5,1.0,1.0, 0.5,0.0,0.0, 0.5,1.0,0.0, 0.5,0.0,1.0,  0.5,1.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): only the first 4 of 8 marker pairs are applied — confirm intent */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  PetscFunctionReturn(0);
}
660 
661 static PetscErrorCode CustomView(DM dm, PetscViewer v)
662 {
663   DMPlexInterpolatedFlag interpolated;
664   PetscBool              distributed;
665 
666   PetscFunctionBegin;
667   PetscCall(DMPlexIsDistributed(dm, &distributed));
668   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
669   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
670   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
671   PetscFunctionReturn(0);
672 }
673 
/* Load the mesh from user->filename into *dm. If the heavy PointSF test is
   requested and the loader produced a distributed mesh, also load a serial
   copy into *serialDM (forced via -dm_plex_hdf5_force_sequential) so the
   boundaries can be compared later. Updates user->dim from the loaded mesh. */
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char    *filename       = user->filename;
  PetscBool      testHeavy      = user->testHeavy;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      distributed    = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  /* Temporarily disable interface orientation during load (restored below) */
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed,comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
699 
/* Build the mesh under test: load from file, generate with the box mesh
   generator, or construct one of the hard-coded test meshes listed at the top
   of this file. Depending on user options the mesh is interpolated before
   and/or after distribution, and "heavy" PointSF consistency checks are run
   against a portable coordinate-based description of the domain boundary.
   Collective on comm; on output *dm holds the final (possibly parallel) mesh. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscPartitioner part;
  PortableBoundary boundary     = NULL;
  DM             serialDM       = NULL;
  PetscBool      cellSimplex    = user->cellSimplex;
  PetscBool      useGenerator   = user->useGenerator;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      testHeavy      = user->testHeavy;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Stage 0: mesh creation (file loader, box generator, or hard-coded tests) */
  if (user->filename[0]) {
    PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
  } else if (useGenerator) {
    PetscCall(PetscLogStagePush(stage[0]));
    PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
    PetscCall(PetscLogStagePop());
  } else {
    PetscCall(PetscLogStagePush(stage[0]));
    switch (user->dim) {
    case 1:
      PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
      break;
    case 2:
      if (cellSimplex) {
        PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
      }
      break;
    case 3:
      if (cellSimplex) {
        PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
      }
      break;
    default:
      SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
    }
    PetscCall(PetscLogStagePop());
  }
  /* the user-supplied coordinate list is interpreted as dim-tuples */
  PetscCheck(user->ncoords % user->dim == 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisable by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
  PetscCall(PetscObjectSetName((PetscObject) *dm, "Original Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

  /* Stage 2: serial interpolation (before distribution), if requested */
  if (interpSerial) {
    DM idm;

    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
    PetscCall(PetscLogStagePush(stage[2]));
    PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscCall(PetscLogStagePop());
    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
    /* replace the original DM with its interpolated version */
    PetscCall(DMDestroy(dm));
    *dm = idm;
    PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
  }

  /* Set partitioner options */
  PetscCall(DMPlexGetPartitioner(*dm, &part));
  if (part) {
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
  }

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  /* heavy check of the (still serial) mesh: capture the domain boundary and
     verify the PointSF against it */
  if (testHeavy) {
    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(*dm, &distributed));
    if (!serialDM && !distributed) {
      serialDM = *dm;
      PetscCall(PetscObjectReference((PetscObject)*dm));
    }
    if (serialDM) {
      PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
    }
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }
  if (user->distribute) {
    DM               pdm = NULL;

    /* Redistribute mesh over processes using that partitioner */
    PetscCall(PetscLogStagePush(stage[1]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
    PetscCall(PetscLogStagePop());
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm  = pdm;
      PetscCall(PetscObjectSetName((PetscObject) *dm, "Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
    }

    /* Stage 2: parallel interpolation (after distribution), if requested */
    if (interpParallel) {
      DM idm;

      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
      PetscCall(PetscLogStagePush(stage[2]));
      PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscCall(PetscLogStagePop());
      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
      PetscCall(DMDestroy(dm));
      *dm = idm;
      PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
    }
  }
  /* heavy check of the final (possibly distributed/interpolated) mesh */
  if (testHeavy) {
    if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }

  PetscCall(PetscObjectSetName((PetscObject) *dm, "Parallel Mesh"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(DMDestroy(&serialDM));
  PetscCall(PortableBoundaryDestroy(&boundary));
  PetscFunctionReturn(0);
}
834 
835 #define ps2d(number) ((double) PetscRealPart(number))
836 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
837 {
838   PetscFunctionBegin;
839   PetscCheck(dim <= 3,PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
840   if (tol >= 1e-3) {
841     switch (dim) {
842       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.3f)",ps2d(coords[0])));
843       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1])));
844       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
845     }
846   } else {
847     switch (dim) {
848       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.6f)",ps2d(coords[0])));
849       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1])));
850       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
851     }
852   }
853   PetscFunctionReturn(0);
854 }
855 
/* For each coordinate tuple in coordsVec, locate the matching local vertex
   (within tolerance tol) using DMPlexFindVertices() and print, per rank, the
   coordinates together with the found point number (or -1 if not found on
   this rank - assumed from DMPlexFindVertices semantics, TODO confirm).
   Output is rank-synchronized on the given ASCII viewer. */
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt       dim, i, npoints;
  IS             pointsIS;
  const PetscInt *points;
  const PetscScalar *coords;
  char           coordstr[128];
  MPI_Comm       comm;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));
  PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(VecGetArrayRead(coordsVec, &coords));
  for (i=0; i < npoints; i++) {
    /* coords is interpreted as npoints dim-tuples */
    PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol));
    /* separator between points, emitted once (on rank 0) per point */
    if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
    /* flush per point so lines from all ranks stay grouped by point */
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscCall(VecRestoreArrayRead(coordsVec, &coords));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscCall(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}
887 
/* Test DMPlexGetConeRecursive(): expand the user-specified points down to
   vertices, view the per-depth sections and ISs, and verify the inverse
   operation DMPlexExpandedConesToFaces_Private() reproduces each level. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS                is;
  PetscSection      *sects;
  IS                *iss;
  PetscInt          d,depth;
  PetscMPIInt       rank;
  PetscViewer       viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  /* optionally exercise the empty-input path on rank 0 */
  if (user->testExpandPointsEmpty && rank == 0) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  /* use a subviewer so each rank's output is printed as a contiguous block */
  PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank));
  for (d=depth-1; d>=0; d--) {
    IS          checkIS;
    PetscBool   flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n",d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth-1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d+1], &flg));
      PetscCheck(flg,PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}
928 
/* Inverse of one level of cone expansion: for each point q in the section's
   chart, take the cone points stored in is (laid out by section) and find the
   unique mesh point whose cone they form, via DMPlexGetJoin().
   Entries of the output IS: the joined point if found; -1 if any cone entry
   is negative (propagating an upstream "not found"); -2 if the join is empty. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt          n,n1,ncone,numCoveredPoints,o,p,q,start,end;
  const PetscInt    *coveredPoints;
  const PetscInt    *arr, *cone;
  PetscInt          *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  /* the IS must hold exactly the section's storage */
  PetscCheck(n == n1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end-start, &newarr));
  for (q=start; q<end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* a single-point "cone" maps to that point itself */
      numCoveredPoints = 1;
      p = cone[0];
    } else {
      PetscInt i;
      p = PETSC_MAX_INT;
      /* any negative cone entry marks this point as invalid (-1) */
      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = -1; break;}
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheck(numCoveredPoints <= 1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %" PetscInt_FMT,q);
        if (numCoveredPoints) p = coveredPoints[0];
        else                  p = -2; /* no point covers this cone */
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q-start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
968 
969 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
970 {
971   PetscInt          d;
972   IS                is,newis;
973 
974   PetscFunctionBegin;
975   is = boundary_expanded_is;
976   PetscCall(PetscObjectReference((PetscObject)is));
977   for (d = 0; d < depth-1; ++d) {
978     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
979     PetscCall(ISDestroy(&is));
980     is = newis;
981   }
982   *boundary_is = is;
983   PetscFunctionReturn(0);
984 }
985 
986 #define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; }
987 
/* View a DMLabel according to a viewer option, but with the viewer created on
   the given communicator (the label itself may live on a different comm).
   A static flag guards against reentrancy; on error CHKERRQI() merely resets
   the guard rather than propagating the error code (best-effort viewing). */
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE; /* reentrancy guard */
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall,PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg));
  if (flg) {
    CHKERRQI(incall,PetscViewerPushFormat(viewer,format));
    CHKERRQI(incall,DMLabelView(label, viewer));
    CHKERRQI(incall,PetscViewerPopFormat(viewer));
    CHKERRQI(incall,PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1008 
1009 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1010 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1011 {
1012   IS                tmpis;
1013 
1014   PetscFunctionBegin;
1015   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1016   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1017   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1018   PetscCall(ISDestroy(&tmpis));
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 /* currently only for simple PetscSection without fields or constraints */
1023 static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1024 {
1025   PetscSection      sec;
1026   PetscInt          chart[2], p;
1027   PetscInt          *dofarr;
1028   PetscMPIInt       rank;
1029 
1030   PetscFunctionBegin;
1031   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1032   if (rank == rootrank) {
1033     PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1034   }
1035   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1036   PetscCall(PetscMalloc1(chart[1]-chart[0], &dofarr));
1037   if (rank == rootrank) {
1038     for (p = chart[0]; p < chart[1]; p++) {
1039       PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]));
1040     }
1041   }
1042   PetscCallMPI(MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm));
1043   PetscCall(PetscSectionCreate(comm, &sec));
1044   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1045   for (p = chart[0]; p < chart[1]; p++) {
1046     PetscCall(PetscSectionSetDof(sec, p, dofarr[p-chart[0]]));
1047   }
1048   PetscCall(PetscSectionSetUp(sec));
1049   PetscCall(PetscFree(dofarr));
1050   *secout = sec;
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1055 {
1056   IS                  faces_expanded_is;
1057 
1058   PetscFunctionBegin;
1059   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1060   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1061   PetscCall(ISDestroy(&faces_expanded_is));
1062   PetscFunctionReturn(0);
1063 }
1064 
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
/* enable=PETSC_FALSE forces the option to 0; enable=PETSC_TRUE restores the
   previous state (either the user's explicit "true" or the unset default).
   Uses static state, so it assumes strictly paired disable/enable calls. */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions      options = NULL;
  const char        *prefix = NULL;
  const char        opt[] = "-dm_plex_interpolate_orient_interfaces";
  char              prefix_opt[512];
  PetscBool         flg, set;
  static PetscBool  wasSetTrue = PETSC_FALSE; /* remembers a user-set "true" so it can be restored */

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* build "-<prefix>dm_plex_interpolate_orient_interfaces" */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* force the option off, remembering whether the user had it explicitly on */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* verify the option database now reflects the requested state */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheck(!set || flg == enable,PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1102 
/* get coordinate description of the whole-domain boundary */
/* The input DM must be serial (all points on one rank). The boundary faces
   are marked, expanded recursively into vertices, and translated into
   coordinates; the coordinates and per-level sections are then replicated to
   all ranks so the result (*boundary) is partition-independent. */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                     *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg,comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    /* expand boundary faces down to vertices, and those to coordinates */
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg,PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec         tmp;
    VecScatter  sc;
    IS          xis;
    PetscInt    n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n*size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* replicate depth and per-level sections from root rank to all ranks */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d=0; d<bnd->depth; d++) {
    PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
  }

  if (rank == rootrank) {
    PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  }
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1208 
1209 /* get faces of inter-partition interface */
1210 static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1211 {
1212   MPI_Comm               comm;
1213   DMLabel                label;
1214   IS                     part_boundary_faces_is;
1215   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1216   PetscInt               value = 1;
1217   DMPlexInterpolatedFlag intp;
1218 
1219   PetscFunctionBegin;
1220   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1221   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1222   PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1223 
1224   /* get ipdm partition boundary (partBoundary) */
1225   PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1226   PetscCall(DMAddLabel(ipdm, label));
1227   PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1228   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1229   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1230   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1231   PetscCall(DMLabelDestroy(&label));
1232 
1233   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1234   PetscCall(ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is));
1235   PetscCall(ISDestroy(&part_boundary_faces_is));
1236   PetscFunctionReturn(0);
1237 }
1238 
/* compute inter-partition interface including edges and vertices */
/* Completes the interface face set downward (DMPlexLabelComplete adds the
   closure points, i.e. edges and vertices of the given faces) and returns
   the full interface point set in *interface_is. */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel                label;
  PetscInt               value = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheck(intp == DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* seed a temporary label with the interface faces, then complete it */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  PetscCall(DMAddLabel(ipdm, label));
  PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  PetscCall(DMPlexLabelComplete(ipdm, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  /* the label was only a scratch object; remove it from the DM again */
  PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(0);
}
1266 
1267 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1268 {
1269   PetscInt        n;
1270   const PetscInt  *arr;
1271 
1272   PetscFunctionBegin;
1273   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1274   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1275   PetscFunctionReturn(0);
1276 }
1277 
1278 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1279 {
1280   PetscInt        n;
1281   const PetscInt  *rootdegree;
1282   PetscInt        *arr;
1283 
1284   PetscFunctionBegin;
1285   PetscCall(PetscSFSetUp(sf));
1286   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1287   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1288   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1289   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1290   PetscFunctionReturn(0);
1291 }
1292 
1293 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1294 {
1295   IS pointSF_out_is, pointSF_in_is;
1296 
1297   PetscFunctionBegin;
1298   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1299   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1300   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1301   PetscCall(ISDestroy(&pointSF_out_is));
1302   PetscCall(ISDestroy(&pointSF_in_is));
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #define CHKERRMY(ierr) PetscCheck(!ierr,PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
1307 
/* Print each point of pointsIS with its type: vertices (depth 0) with their
   coordinates, higher-depth points as edge/face/cell by depth-label value.
   Recurses into each point's cone, indenting one tab per level.
   Assumes v is a synchronized ASCII viewer (enforced by the caller). */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar     *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt  *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i=0; i<npoints; i++) {
    p = points[i];
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      /* depth 0 => vertex: print its coordinates from the coordinate section */
      PetscInt        n, o;
      char            coordstr[128];

      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
    } else {
      char            entityType[16];

      switch (depth) {
        case 1: PetscCall(PetscStrcpy(entityType, "edge")); break;
        case 2: PetscCall(PetscStrcpy(entityType, "face")); break;
        case 3: PetscCall(PetscStrcpy(entityType, "cell")); break;
        default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in a 1D/2D mesh the top-depth entities are the cells */
      if (depth == dim && dim < 3) {
        PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    /* recurse into the cone of this point */
    if (coneSize) {
      const PetscInt *cone;
      IS             coneIS;

      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1367 
1368 static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1369 {
1370   PetscBool       flg;
1371   PetscInt        npoints;
1372   PetscMPIInt     rank;
1373 
1374   PetscFunctionBegin;
1375   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1376   PetscCheck(flg,PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1377   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1378   PetscCall(PetscViewerASCIIPushSynchronized(v));
1379   PetscCall(ISGetLocalSize(points, &npoints));
1380   if (npoints) {
1381     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1382     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1383   }
1384   PetscCall(PetscViewerFlush(v));
1385   PetscCall(PetscViewerASCIIPopSynchronized(v));
1386   PetscFunctionReturn(0);
1387 }
1388 
/* Verify that the PointSF of ipdm covers exactly the given inter-partition
   interface points. On mismatch, print the missing and extra points (with
   their types and coordinates) to stderr and raise an error. */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF         pointsf;
  IS              pointsf_is;
  PetscBool       flg;
  MPI_Comm        comm;
  PetscMPIInt     size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  /* with no PointSF there must be no interface points either */
  if (!pointsf) {
    PetscInt N=0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N,comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  /* the check must pass on every rank simultaneously */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&flg,1,MPIU_BOOL,MPI_LAND,comm));
  if (!flg) {
    /* diagnostics path: show both set differences before erroring out;
       CHKERRMY converts any failure here into a generic "unable to show details" error */
    IS pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1435 
1436 /* remove faces & edges from label, leave just vertices */
1437 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1438 {
1439   PetscInt        vStart, vEnd;
1440   MPI_Comm        comm;
1441 
1442   PetscFunctionBegin;
1443   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1444   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1445   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1446   PetscFunctionReturn(0);
1447 }
1448 
1449 /*
1450   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1451 
1452   Collective
1453 
1454   Input Parameters:
1455 . dm - The DMPlex object
1456 
1457   Notes:
1458   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1459   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1460   This is mainly intended for debugging/testing purposes.
1461 
1462   Algorithm:
1463   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1464   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1465   3. the mesh is distributed or loaded in parallel
1466   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1467   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1468   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1469   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1470 
1471   Level: developer
1472 
1473 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1474 */
static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
{
  DM                     ipdm=NULL;
  IS                     boundary_faces_is, interface_faces_is, interface_is;
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    /* already fully interpolated - check dm itself */
    ipdm = dm;
  } else {
    /* create temporary interpolated DM if input DM is not interpolated */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
    PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
    PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
  }
  PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));

  /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
  PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
  /* get inter-partition interface faces (interface_faces_is) */
  PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
  /* compute inter-partition interface including edges and vertices (interface_is) */
  PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
  /* destroy intermediate ISs */
  PetscCall(ISDestroy(&boundary_faces_is));
  PetscCall(ISDestroy(&interface_faces_is));

  /* for uninterpolated case, keep just vertices in interface */
  if (!intp) {
    /* NOTE(review): the temporary ipdm is destroyed only when intp == DMPLEX_INTERPOLATED_NONE;
       if intp were PARTIAL/MIXED, the ipdm created above would leak - confirm such inputs
       cannot reach this function */
    PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
    PetscCall(DMDestroy(&ipdm));
  }

  /* compare PointSF with the boundary reconstructed from coordinates */
  PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
  PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
  PetscCall(ISDestroy(&interface_is));
  PetscFunctionReturn(0);
}
1518 
1519 int main(int argc, char **argv)
1520 {
1521   DM             dm;
1522   AppCtx         user;
1523 
1524   PetscFunctionBeginUser;
1525   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1526   PetscCall(PetscLogStageRegister("create",&stage[0]));
1527   PetscCall(PetscLogStageRegister("distribute",&stage[1]));
1528   PetscCall(PetscLogStageRegister("interpolate",&stage[2]));
1529   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1530   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1531   if (user.nPointsToExpand) {
1532     PetscCall(TestExpandPoints(dm, &user));
1533   }
1534   if (user.ncoords) {
1535     Vec coords;
1536 
1537     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1538     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1539     PetscCall(VecDestroy(&coords));
1540   }
1541   PetscCall(DMDestroy(&dm));
1542   PetscCall(PetscFinalize());
1543   return 0;
1544 }
1545 
1546 /*TEST
1547 
1548   testset:
1549     nsize: 2
1550     args: -dm_view ascii::ascii_info_detail
1551     args: -dm_plex_check_all
1552     test:
1553       suffix: 1_tri_dist0
1554       args: -distribute 0 -interpolate {{none create}separate output}
1555     test:
1556       suffix: 1_tri_dist1
1557       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1558     test:
1559       suffix: 1_quad_dist0
1560       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1561     test:
1562       suffix: 1_quad_dist1
1563       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1564     test:
1565       suffix: 1_1d_dist1
1566       args: -dim 1 -distribute 1
1567 
1568   testset:
1569     nsize: 3
1570     args: -testnum 1 -interpolate create
1571     args: -dm_plex_check_all
1572     test:
1573       suffix: 2
1574       args: -dm_view ascii::ascii_info_detail
1575     test:
1576       suffix: 2a
1577       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1578     test:
1579       suffix: 2b
1580       args: -test_expand_points 0,1,2,5,6
1581     test:
1582       suffix: 2c
1583       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1584 
1585   testset:
    # the same as testset 1 but for 3D
1587     nsize: 2
1588     args: -dim 3 -dm_view ascii::ascii_info_detail
1589     args: -dm_plex_check_all
1590     test:
1591       suffix: 4_tet_dist0
1592       args: -distribute 0 -interpolate {{none create}separate output}
1593     test:
1594       suffix: 4_tet_dist1
1595       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1596     test:
1597       suffix: 4_hex_dist0
1598       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1599     test:
1600       suffix: 4_hex_dist1
1601       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1602 
1603   test:
1604     # the same as 4_tet_dist0 but test different initial orientations
1605     suffix: 4_tet_test_orient
1606     nsize: 2
1607     args: -dim 3 -distribute 0
1608     args: -dm_plex_check_all
1609     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1610     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1611 
1612   testset:
1613     requires: exodusii
1614     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1615     args: -dm_view ascii::ascii_info_detail
1616     args: -dm_plex_check_all
1617     args: -custom_view
1618     test:
1619       suffix: 5_seq
1620       nsize: 1
1621       args: -distribute 0 -interpolate {{none create}separate output}
1622     test:
1623       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1624       suffix: 5_dist0
1625       nsize: 2
1626       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1627     test:
1628       suffix: 5_dist1
1629       nsize: 2
1630       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1631 
1632   testset:
1633     nsize: {{1 2 4}}
1634     args: -use_generator
1635     args: -dm_plex_check_all
1636     args: -distribute -interpolate none
1637     test:
1638       suffix: 6_tri
1639       requires: triangle
1640       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1641     test:
1642       suffix: 6_quad
1643       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1644     test:
1645       suffix: 6_tet
1646       requires: ctetgen
1647       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1648     test:
1649       suffix: 6_hex
1650       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1651   testset:
1652     nsize: {{1 2 4}}
1653     args: -use_generator
1654     args: -dm_plex_check_all
1655     args: -distribute -interpolate create
1656     test:
1657       suffix: 6_int_tri
1658       requires: triangle
1659       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1660     test:
1661       suffix: 6_int_quad
1662       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1663     test:
1664       suffix: 6_int_tet
1665       requires: ctetgen
1666       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1667     test:
1668       suffix: 6_int_hex
1669       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1670   testset:
1671     nsize: {{2 4}}
1672     args: -use_generator
1673     args: -dm_plex_check_all
1674     args: -distribute -interpolate after_distribute
1675     test:
1676       suffix: 6_parint_tri
1677       requires: triangle
1678       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1679     test:
1680       suffix: 6_parint_quad
1681       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1682     test:
1683       suffix: 6_parint_tet
1684       requires: ctetgen
1685       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1686     test:
1687       suffix: 6_parint_hex
1688       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1689 
1690   testset: # 7 EXODUS
1691     requires: exodusii
1692     args: -dm_plex_check_all
1693     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1694     args: -distribute
1695     test: # seq load, simple partitioner
1696       suffix: 7_exo
1697       nsize: {{1 2 4 5}}
1698       args: -interpolate none
1699     test: # seq load, seq interpolation, simple partitioner
1700       suffix: 7_exo_int_simple
1701       nsize: {{1 2 4 5}}
1702       args: -interpolate create
1703     test: # seq load, seq interpolation, metis partitioner
1704       suffix: 7_exo_int_metis
1705       requires: parmetis
1706       nsize: {{2 4 5}}
1707       args: -interpolate create
1708       args: -petscpartitioner_type parmetis
1709     test: # seq load, simple partitioner, par interpolation
1710       suffix: 7_exo_simple_int
1711       nsize: {{2 4 5}}
1712       args: -interpolate after_distribute
1713     test: # seq load, metis partitioner, par interpolation
1714       suffix: 7_exo_metis_int
1715       requires: parmetis
1716       nsize: {{2 4 5}}
1717       args: -interpolate after_distribute
1718       args: -petscpartitioner_type parmetis
1719 
  testset: # 7 HDF5 SEQUENTIAL LOAD
1721     requires: hdf5 !complex
1722     args: -dm_plex_check_all
1723     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1724     args: -dm_plex_hdf5_force_sequential
1725     args: -distribute
1726     test: # seq load, simple partitioner
1727       suffix: 7_seq_hdf5_simple
1728       nsize: {{1 2 4 5}}
1729       args: -interpolate none
1730     test: # seq load, seq interpolation, simple partitioner
1731       suffix: 7_seq_hdf5_int_simple
1732       nsize: {{1 2 4 5}}
1733       args: -interpolate after_create
1734     test: # seq load, seq interpolation, metis partitioner
1735       nsize: {{2 4 5}}
1736       suffix: 7_seq_hdf5_int_metis
1737       requires: parmetis
1738       args: -interpolate after_create
1739       args: -petscpartitioner_type parmetis
1740     test: # seq load, simple partitioner, par interpolation
1741       suffix: 7_seq_hdf5_simple_int
1742       nsize: {{2 4 5}}
1743       args: -interpolate after_distribute
1744     test: # seq load, metis partitioner, par interpolation
1745       nsize: {{2 4 5}}
1746       suffix: 7_seq_hdf5_metis_int
1747       requires: parmetis
1748       args: -interpolate after_distribute
1749       args: -petscpartitioner_type parmetis
1750 
1751   testset: # 7 HDF5 PARALLEL LOAD
1752     requires: hdf5 !complex
1753     nsize: {{2 4 5}}
1754     args: -dm_plex_check_all
1755     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1756     test: # par load
1757       suffix: 7_par_hdf5
1758       args: -interpolate none
1759     test: # par load, par interpolation
1760       suffix: 7_par_hdf5_int
1761       args: -interpolate after_create
1762     test: # par load, parmetis repartitioner
1763       TODO: Parallel partitioning of uninterpolated meshes not supported
1764       suffix: 7_par_hdf5_parmetis
1765       requires: parmetis
1766       args: -distribute -petscpartitioner_type parmetis
1767       args: -interpolate none
1768     test: # par load, par interpolation, parmetis repartitioner
1769       suffix: 7_par_hdf5_int_parmetis
1770       requires: parmetis
1771       args: -distribute -petscpartitioner_type parmetis
1772       args: -interpolate after_create
1773     test: # par load, parmetis partitioner, par interpolation
1774       TODO: Parallel partitioning of uninterpolated meshes not supported
1775       suffix: 7_par_hdf5_parmetis_int
1776       requires: parmetis
1777       args: -distribute -petscpartitioner_type parmetis
1778       args: -interpolate after_distribute
1779 
1780     test:
1781       suffix: 7_hdf5_hierarch
1782       requires: hdf5 ptscotch !complex
1783       nsize: {{2 3 4}separate output}
1784       args: -distribute
1785       args: -interpolate after_create
1786       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1787       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1788       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1789 
1790   test:
1791     suffix: 8
1792     requires: hdf5 !complex
1793     nsize: 4
1794     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1795     args: -distribute 0 -interpolate after_create
1796     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1797     args: -dm_plex_check_all
1798     args: -custom_view
1799 
  testset: # 9 HDF5 SEQUENTIAL LOAD
1801     requires: hdf5 !complex datafilespath
1802     args: -dm_plex_check_all
1803     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1804     args: -dm_plex_hdf5_force_sequential
1805     args: -distribute
1806     test: # seq load, simple partitioner
1807       suffix: 9_seq_hdf5_simple
1808       nsize: {{1 2 4 5}}
1809       args: -interpolate none
1810     test: # seq load, seq interpolation, simple partitioner
1811       suffix: 9_seq_hdf5_int_simple
1812       nsize: {{1 2 4 5}}
1813       args: -interpolate after_create
1814     test: # seq load, seq interpolation, metis partitioner
1815       nsize: {{2 4 5}}
1816       suffix: 9_seq_hdf5_int_metis
1817       requires: parmetis
1818       args: -interpolate after_create
1819       args: -petscpartitioner_type parmetis
1820     test: # seq load, simple partitioner, par interpolation
1821       suffix: 9_seq_hdf5_simple_int
1822       nsize: {{2 4 5}}
1823       args: -interpolate after_distribute
1824     test: # seq load, simple partitioner, par interpolation
1825       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1826       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1827       # We can then provide an intentionally broken mesh instead.
1828       TODO: This test is broken because PointSF is fixed.
1829       suffix: 9_seq_hdf5_simple_int_err
1830       nsize: 4
1831       args: -interpolate after_distribute
1832       filter: sed -e "/PETSC ERROR/,$$d"
1833     test: # seq load, metis partitioner, par interpolation
1834       nsize: {{2 4 5}}
1835       suffix: 9_seq_hdf5_metis_int
1836       requires: parmetis
1837       args: -interpolate after_distribute
1838       args: -petscpartitioner_type parmetis
1839 
1840   testset: # 9 HDF5 PARALLEL LOAD
1841     requires: hdf5 !complex datafilespath
1842     nsize: {{2 4 5}}
1843     args: -dm_plex_check_all
1844     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1845     test: # par load
1846       suffix: 9_par_hdf5
1847       args: -interpolate none
1848     test: # par load, par interpolation
1849       suffix: 9_par_hdf5_int
1850       args: -interpolate after_create
1851     test: # par load, parmetis repartitioner
1852       TODO: Parallel partitioning of uninterpolated meshes not supported
1853       suffix: 9_par_hdf5_parmetis
1854       requires: parmetis
1855       args: -distribute -petscpartitioner_type parmetis
1856       args: -interpolate none
1857     test: # par load, par interpolation, parmetis repartitioner
1858       suffix: 9_par_hdf5_int_parmetis
1859       requires: parmetis
1860       args: -distribute -petscpartitioner_type parmetis
1861       args: -interpolate after_create
1862     test: # par load, parmetis partitioner, par interpolation
1863       TODO: Parallel partitioning of uninterpolated meshes not supported
1864       suffix: 9_par_hdf5_parmetis_int
1865       requires: parmetis
1866       args: -distribute -petscpartitioner_type parmetis
1867       args: -interpolate after_distribute
1868 
1869   testset: # 10 HDF5 PARALLEL LOAD
1870     requires: hdf5 !complex datafilespath
1871     nsize: {{2 4 7}}
1872     args: -dm_plex_check_all
1873     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1874     test: # par load, par interpolation
1875       suffix: 10_par_hdf5_int
1876       args: -interpolate after_create
1877 TEST*/
1878