xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
    mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
189 typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType;
190 
/* Runtime configuration for this test, filled in by ProcessOptions() */
typedef struct {
  PetscInt   debug;                        /* The debugging level */
  PetscInt   testNum;                      /* Indicates the mesh to create */
  PetscInt   dim;                          /* The topological mesh dimension */
  PetscBool  cellSimplex;                  /* Use simplices or hexes */
  PetscBool  distribute;                   /* Distribute the mesh */
  InterpType interpolate;                  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool  useGenerator;                 /* Construct mesh with a mesh generator */
  PetscBool  testOrientIF;                 /* Test for different original interface orientations */
  PetscBool  testHeavy;                    /* Run the heavy PointSF test */
  PetscBool  customView;                   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt   ornt[2];                      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt   faces[3];                     /* Number of faces per dimension for generator */
  PetscScalar coords[128];                 /* Coordinates given to -view_vertices_from_coords */
  PetscReal  coordsTol;                    /* Tolerance for matching coords[] against vertex coordinates */
  PetscInt   ncoords;                      /* Number of valid entries in coords[] */
  PetscInt   pointsToExpand[128];          /* DAG points given to -test_expand_points */
  PetscInt   nPointsToExpand;              /* Number of valid entries in pointsToExpand[] */
  PetscBool  testExpandPointsEmpty;        /* If true, rank 0 passes an empty array to the expand test */
  char       filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
212 
/* Rank-independent description of a mesh boundary used by the heavy PointSF
   test: boundary vertex coordinates plus one PetscSection per stratum
   (sections[] has 'depth' entries; see PortableBoundaryDestroy()). */
struct _n_PortableBoundary {
  Vec coordinates;        /* coordinates of boundary vertices */
  PetscInt depth;         /* number of entries in sections[] */
  PetscSection *sections; /* one section per depth stratum */
};
typedef struct _n_PortableBoundary * PortableBoundary;
219 
220 #if defined(PETSC_USE_LOG)
221 static PetscLogStage  stage[3];
222 #endif
223 
224 static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
225 static PetscErrorCode DMPlexSetOrientInterface_Private(DM,PetscBool);
226 static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
227 static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
228 
229 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
230 {
231   PetscInt       d;
232 
233   PetscFunctionBegin;
234   if (!*bnd) PetscFunctionReturn(0);
235   CHKERRQ(VecDestroy(&(*bnd)->coordinates));
236   for (d=0; d < (*bnd)->depth; d++) {
237     CHKERRQ(PetscSectionDestroy(&(*bnd)->sections[d]));
238   }
239   CHKERRQ(PetscFree((*bnd)->sections));
240   CHKERRQ(PetscFree(*bnd));
241   PetscFunctionReturn(0);
242 }
243 
/* Parse command-line options into 'options', starting from hard-coded
   defaults and enforcing consistency between related flags.
   Collective on 'comm'. */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *interpTypes[4]  = {"none", "create", "after_create", "after_distribute"};
  PetscInt       interp=NONE, dim;
  PetscBool      flg1, flg2;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Defaults */
  options->debug        = 0;
  options->testNum      = 0;
  options->dim          = 2;
  options->cellSimplex  = PETSC_TRUE;
  options->distribute   = PETSC_FALSE;
  options->interpolate  = NONE;
  options->useGenerator = PETSC_FALSE;
  options->testOrientIF = PETSC_FALSE;
  options->testHeavy    = PETSC_TRUE;
  options->customView   = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]      = 0;
  options->ornt[1]      = 0;
  options->faces[0]     = 2;
  options->faces[1]     = 2;
  options->faces[2]     = 2;
  options->filename[0]  = '\0';
  options->coordsTol    = PETSC_DEFAULT;

  ierr = PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");CHKERRQ(ierr);
  CHKERRQ(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0));
  CHKERRQ(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0));
  CHKERRQ(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  CHKERRQ(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  CHKERRQ(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType) interp;
  /* Interpolating after distribution only makes sense if we actually distribute */
  PetscCheckFalse(!options->distribute && options->interpolate == AFTER_DISTRIBUTE,comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  CHKERRQ(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords/nPointsToExpand are set to array capacity before the call; on return
     they hold the number of values actually supplied by the user */
  options->ncoords = 128;
  CHKERRQ(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  CHKERRQ(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  options->nPointsToExpand = 128;
  CHKERRQ(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) {
    CHKERRQ(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  }
  CHKERRQ(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  CHKERRQ(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  CHKERRQ(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3));
  /* -faces takes up to 3 entries; the number actually given is returned in dim
     and, when consistent with -dim, overrides the mesh dimension */
  dim = 3;
  CHKERRQ(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    PetscCheckFalse(flg1 && dim != options->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %D is not equal to length %D of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  CHKERRQ(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  CHKERRQ(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0));
  CHKERRQ(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0));
  PetscCheckFalse(flg2 != options->testOrientIF,comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i=0; i<2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
    }
    /* The orientation test runs only on the hard-coded 3D two-tet mesh,
       so force the corresponding settings regardless of other options */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
316 
317 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
318 {
319   PetscInt       testNum = user->testNum;
320   PetscMPIInt    rank,size;
321   PetscInt       numCorners=2,i;
322   PetscInt       numCells,numVertices,network;
323   PetscInt       *cells;
324   PetscReal      *coords;
325 
326   PetscFunctionBegin;
327   CHKERRMPI(MPI_Comm_rank(comm, &rank));
328   CHKERRMPI(MPI_Comm_size(comm, &size));
329   PetscCheckFalse(size > 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for <=2 processes",testNum);
330 
331   numCells = 3;
332   CHKERRQ(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
333   PetscCheckFalse(numCells < 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells must >=3",numCells);
334 
335   if (size == 1) {
336     numVertices = numCells + 1;
337     CHKERRQ(PetscMalloc2(2*numCells,&cells,2*numVertices,&coords));
338     for (i=0; i<numCells; i++) {
339       cells[2*i] = i; cells[2*i+1] = i + 1;
340       coords[2*i] = i; coords[2*i+1] = i + 1;
341     }
342 
343     CHKERRQ(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
344     CHKERRQ(PetscFree2(cells,coords));
345     PetscFunctionReturn(0);
346   }
347 
348   network = 0;
349   CHKERRQ(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
350   if (network == 0) {
351     switch (rank) {
352     case 0:
353     {
354       numCells    = 2;
355       numVertices = numCells;
356       CHKERRQ(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
357       cells[0] = 0; cells[1] = 1;
358       cells[2] = 1; cells[3] = 2;
359       coords[0] = 0.; coords[1] = 1.;
360       coords[2] = 1.; coords[3] = 2.;
361     }
362     break;
363     case 1:
364     {
365       numCells    -= 2;
366       numVertices = numCells + 1;
367       CHKERRQ(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
368       for (i=0; i<numCells; i++) {
369         cells[2*i] = 2+i; cells[2*i+1] = 2 + i + 1;
370         coords[2*i] = 2+i; coords[2*i+1] = 2 + i + 1;
371       }
372     }
373     break;
374     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
375     }
376   } else { /* network_case = 1 */
377     /* ----------------------- */
378     switch (rank) {
379     case 0:
380     {
381       numCells    = 2;
382       numVertices = 3;
383       CHKERRQ(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
384       cells[0] = 0; cells[1] = 3;
385       cells[2] = 3; cells[3] = 1;
386     }
387     break;
388     case 1:
389     {
390       numCells    = 1;
391       numVertices = 1;
392       CHKERRQ(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
393       cells[0] = 3; cells[1] = 2;
394     }
395     break;
396     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
397     }
398   }
399   CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
400   CHKERRQ(PetscFree2(cells,coords));
401   PetscFunctionReturn(0);
402 }
403 
/* Create the hard-coded 2-D triangle test meshes described in the header
   comment (Triangle Tests 0, 1, 2). Each rank contributes its own cells
   (global vertex numbers) and locally owned vertex coordinates to
   DMPlexCreateFromCellListParallelPetsc(), then tags points in the
   "marker" label. */
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    /* Test 0: two triangles sharing a face, one cell per rank (2 ranks) */
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {-0.5, 0.5, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 3, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.5, 0.5};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    /* Test 1: four triangles over 3 ranks (ranks 0,1 get one cell, rank 2 gets two) */
    PetscCheckFalse(size != 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 2, 3};
        PetscReal      coords[4] = {0.5, 0.5, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {2, 4, 3, 2, 1, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): markerPoints holds 5 (point,value) pairs but only the
           first 3 are applied here — confirm whether p < 5 was intended */
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    /* Test 2: same four triangles as Test 1 but with a different vertex numbering */
    PetscCheckFalse(size != 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 2, 0};
        PetscReal      coords[4] = {0.5, 0.5, 0.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 0, 3};
        PetscReal      coords[4] = {0.0, 0.0, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {0, 4, 3, 0, 2, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): only 3 of the 5 marker pairs are applied — see Test 1 */
        for (p = 0; p < 3; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  PetscFunctionReturn(0);
}
523 
/* Create the hard-coded 3-D tetrahedral test mesh (Tetrahedron Test 0 in the
   header comment): two tets sharing a face, one cell per rank (2 ranks).
   When -rotate_interface_* is given (user->testOrientIF), additionally rotate
   the shared interface face on each rank by user->ornt[rank] and verify that
   DMPlexOrientInterface_Internal() repairs the orientation. */
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 4;
        const PetscInt cells[4]  = {0, 2, 1, 3};
        PetscReal      coords[6] = {0.0, 0.0, -0.5,  0.0, -0.5, 0.0};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {1, 2, 4, 3};
        PetscReal      coords[9] = {1.0, 0.0, 0.0,  0.0, 0.5, 0.0,  0.0, 0.0, 0.5};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  if (user->testOrientIF) {
    /* DAG point number of the shared interface face on each rank —
       presumably points 8 (rank 0) and 6 (rank 1); TODO confirm against
       the interpolated numbering of this mesh */
    PetscInt ifp[] = {8, 6};

    CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Mesh before orientation"));
    CHKERRQ(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    CHKERRQ(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    /* NOTE(review): this second view reuses -before_orientation_dm_view even
       though the face has just been rotated — confirm the option name */
    CHKERRQ(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    CHKERRQ(DMPlexCheckFaces(*dm, 0));
    CHKERRQ(DMPlexOrientInterface_Internal(*dm));
    CHKERRQ(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
577 
578 static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
579 {
580   PetscInt       testNum = user->testNum, p;
581   PetscMPIInt    rank, size;
582 
583   PetscFunctionBegin;
584   CHKERRMPI(MPI_Comm_rank(comm, &rank));
585   CHKERRMPI(MPI_Comm_size(comm, &size));
586   switch (testNum) {
587   case 0:
588     PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
589     switch (rank) {
590       case 0:
591       {
592         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
593         const PetscInt cells[4]  = {0, 1, 2, 3};
594         PetscReal      coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
595         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
596 
597         CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
598         for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
599       }
600       break;
601       case 1:
602       {
603         const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
604         const PetscInt cells[4]  = {1, 4, 5, 2};
605         PetscReal      coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
606         PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};
607 
608         CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
609         for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
610       }
611       break;
612       default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
613     }
614     break;
615   default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
616   }
617   PetscFunctionReturn(0);
618 }
619 
/* Create the hard-coded 3-D hexahedral test mesh (Hexahedron Test 0 in the
   header comment): two hexes sharing a face, one cell per rank (2 ranks).
   Cells reference global vertex numbers; coords hold each rank's locally
   owned vertices. */
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
    case 0:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6*3] = {-0.5,0.0,0.0, 0.0,0.0,0.0, 0.0,1.0,0.0, -0.5,1.0,0.0, -0.5,0.0,1.0, 0.0,0.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): markerPoints holds 8 (point,value) pairs but only the
         first 4 are applied — confirm whether p < 8 was intended */
      for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    case 1:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6*3] = {0.0,1.0,1.0, -0.5,1.0,1.0, 0.5,0.0,0.0, 0.5,1.0,0.0, 0.5,0.0,1.0,  0.5,1.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      CHKERRQ(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): only 4 of 8 marker pairs applied — see rank 0 */
      for (p = 0; p < 4; ++p) CHKERRQ(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  PetscFunctionReturn(0);
}
661 
662 static PetscErrorCode CustomView(DM dm, PetscViewer v)
663 {
664   DMPlexInterpolatedFlag interpolated;
665   PetscBool              distributed;
666 
667   PetscFunctionBegin;
668   CHKERRQ(DMPlexIsDistributed(dm, &distributed));
669   CHKERRQ(DMPlexIsInterpolatedCollective(dm, &interpolated));
670   CHKERRQ(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
671   CHKERRQ(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
672   PetscFunctionReturn(0);
673 }
674 
/* Load a mesh from user->filename into *dm and report whether the loader
   produced a distributed or serial mesh. If the heavy PointSF test is enabled
   and the mesh came in distributed, also load a forced-sequential copy into
   *serialDM (otherwise *serialDM is NULL) so the test has a serial reference.
   Sets user->dim from the loaded mesh. */
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char    *filename       = user->filename;
  PetscBool      testHeavy      = user->testHeavy;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      distributed    = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  /* Temporarily disable interface orientation during interpolation-on-create */
  if (testHeavy && interpCreate) CHKERRQ(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  CHKERRQ(PetscLogStagePush(stage[0]));
  CHKERRQ(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  CHKERRQ(PetscLogStagePop());
  if (testHeavy && interpCreate) CHKERRQ(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  CHKERRQ(DMPlexIsDistributed(*dm, &distributed));
  CHKERRQ(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    /* Re-load sequentially to obtain a serial reference mesh for the heavy test */
    CHKERRQ(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    CHKERRQ(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    CHKERRQ(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheckFalse(distributed,comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  CHKERRQ(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
700 
701 static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
702 {
703   PetscPartitioner part;
704   PortableBoundary boundary     = NULL;
705   DM             serialDM       = NULL;
706   PetscBool      cellSimplex    = user->cellSimplex;
707   PetscBool      useGenerator   = user->useGenerator;
708   PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
709   PetscBool      interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
710   PetscBool      interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
711   PetscBool      testHeavy      = user->testHeavy;
712   PetscMPIInt    rank;
713 
714   PetscFunctionBegin;
715   CHKERRMPI(MPI_Comm_rank(comm, &rank));
716   if (user->filename[0]) {
717     CHKERRQ(CreateMeshFromFile(comm, user, dm, &serialDM));
718   } else if (useGenerator) {
719     CHKERRQ(PetscLogStagePush(stage[0]));
720     CHKERRQ(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
721     CHKERRQ(PetscLogStagePop());
722   } else {
723     CHKERRQ(PetscLogStagePush(stage[0]));
724     switch (user->dim) {
725     case 1:
726       CHKERRQ(CreateMesh_1D(comm, interpCreate, user, dm));
727       break;
728     case 2:
729       if (cellSimplex) {
730         CHKERRQ(CreateSimplex_2D(comm, interpCreate, user, dm));
731       } else {
732         CHKERRQ(CreateQuad_2D(comm, interpCreate, user, dm));
733       }
734       break;
735     case 3:
736       if (cellSimplex) {
737         CHKERRQ(CreateSimplex_3D(comm, interpCreate, user, dm));
738       } else {
739         CHKERRQ(CreateHex_3D(comm, interpCreate, user, dm));
740       }
741       break;
742     default:
743       SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %D", user->dim);
744     }
745     CHKERRQ(PetscLogStagePop());
746   }
747   PetscCheckFalse(user->ncoords % user->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %D must be divisable by spatial dimension %D", user->ncoords, user->dim);
748   CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Original Mesh"));
749   CHKERRQ(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
750 
751   if (interpSerial) {
752     DM idm;
753 
754     if (testHeavy) CHKERRQ(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
755     CHKERRQ(PetscLogStagePush(stage[2]));
756     CHKERRQ(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
757     CHKERRQ(PetscLogStagePop());
758     if (testHeavy) CHKERRQ(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
759     CHKERRQ(DMDestroy(dm));
760     *dm = idm;
761     CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh"));
762     CHKERRQ(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
763   }
764 
765   /* Set partitioner options */
766   CHKERRQ(DMPlexGetPartitioner(*dm, &part));
767   if (part) {
768     CHKERRQ(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
769     CHKERRQ(PetscPartitionerSetFromOptions(part));
770   }
771 
772   if (user->customView) CHKERRQ(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
773   if (testHeavy) {
774     PetscBool distributed;
775 
776     CHKERRQ(DMPlexIsDistributed(*dm, &distributed));
777     if (!serialDM && !distributed) {
778       serialDM = *dm;
779       CHKERRQ(PetscObjectReference((PetscObject)*dm));
780     }
781     if (serialDM) {
782       CHKERRQ(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
783     }
784     if (boundary) {
785       /* check DM which has been created in parallel and already interpolated */
786       CHKERRQ(DMPlexCheckPointSFHeavy(*dm, boundary));
787     }
788     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
789     CHKERRQ(DMPlexOrientInterface_Internal(*dm));
790   }
791   if (user->distribute) {
792     DM               pdm = NULL;
793 
794     /* Redistribute mesh over processes using that partitioner */
795     CHKERRQ(PetscLogStagePush(stage[1]));
796     CHKERRQ(DMPlexDistribute(*dm, 0, NULL, &pdm));
797     CHKERRQ(PetscLogStagePop());
798     if (pdm) {
799       CHKERRQ(DMDestroy(dm));
800       *dm  = pdm;
801       CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Redistributed Mesh"));
802       CHKERRQ(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
803     }
804 
805     if (interpParallel) {
806       DM idm;
807 
808       if (testHeavy) CHKERRQ(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
809       CHKERRQ(PetscLogStagePush(stage[2]));
810       CHKERRQ(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
811       CHKERRQ(PetscLogStagePop());
812       if (testHeavy) CHKERRQ(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
813       CHKERRQ(DMDestroy(dm));
814       *dm = idm;
815       CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh"));
816       CHKERRQ(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
817     }
818   }
819   if (testHeavy) {
820     if (boundary) {
821       CHKERRQ(DMPlexCheckPointSFHeavy(*dm, boundary));
822     }
823     /* Orient interface because it could be deliberately skipped above. It is idempotent. */
824     CHKERRQ(DMPlexOrientInterface_Internal(*dm));
825   }
826 
827   CHKERRQ(PetscObjectSetName((PetscObject) *dm, "Parallel Mesh"));
828   CHKERRQ(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
829   CHKERRQ(DMSetFromOptions(*dm));
830   CHKERRQ(DMViewFromOptions(*dm, NULL, "-dm_view"));
831 
832   if (user->customView) CHKERRQ(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
833   CHKERRQ(DMDestroy(&serialDM));
834   CHKERRQ(PortableBoundaryDestroy(&boundary));
835   PetscFunctionReturn(0);
836 }
837 
838 #define ps2d(number) ((double) PetscRealPart(number))
839 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
840 {
841   PetscFunctionBegin;
842   PetscCheckFalse(dim > 3,PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
843   if (tol >= 1e-3) {
844     switch (dim) {
845       case 1: CHKERRQ(PetscSNPrintf(buf,len,"(%12.3f)",ps2d(coords[0])));
846       case 2: CHKERRQ(PetscSNPrintf(buf,len,"(%12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1])));
847       case 3: CHKERRQ(PetscSNPrintf(buf,len,"(%12.3f, %12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
848     }
849   } else {
850     switch (dim) {
851       case 1: CHKERRQ(PetscSNPrintf(buf,len,"(%12.6f)",ps2d(coords[0])));
852       case 2: CHKERRQ(PetscSNPrintf(buf,len,"(%12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1])));
853       case 3: CHKERRQ(PetscSNPrintf(buf,len,"(%12.6f, %12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
854     }
855   }
856   PetscFunctionReturn(0);
857 }
858 
/* Locate mesh points matching the coordinates stored in coordsVec (within tol)
   via DMPlexFindVertices(), and print each coordinate together with the local
   point number it maps to, with output synchronized across ranks. */
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt       dim, i, npoints;
  IS             pointsIS;
  const PetscInt *points;
  const PetscScalar *coords;
  char           coordstr[128];
  MPI_Comm       comm;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)dm, &comm));
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  CHKERRQ(DMGetDimension(dm, &dim));
  CHKERRQ(PetscViewerASCIIPushSynchronized(viewer));
  /* pointsIS has one entry per coordinate tuple; -1 entries mean "not found on this rank" */
  CHKERRQ(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
  CHKERRQ(ISGetIndices(pointsIS, &points));
  CHKERRQ(ISGetLocalSize(pointsIS, &npoints));
  CHKERRQ(VecGetArrayRead(coordsVec, &coords));
  for (i=0; i < npoints; i++) {
    /* coords are interleaved: tuple i occupies coords[i*dim .. i*dim+dim-1] */
    CHKERRQ(coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol));
    /* rank 0 alone emits the separator so it appears exactly once per tuple in the merged output */
    if (rank == 0 && i) CHKERRQ(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
    CHKERRQ(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%D] = %D\n", rank, coordstr, i, points[i]));
    /* flush inside the loop keeps all ranks' lines for tuple i grouped together */
    CHKERRQ(PetscViewerFlush(viewer));
  }
  CHKERRQ(PetscViewerASCIIPopSynchronized(viewer));
  CHKERRQ(VecRestoreArrayRead(coordsVec, &coords));
  CHKERRQ(ISRestoreIndices(pointsIS, &points));
  CHKERRQ(ISDestroy(&pointsIS));
  PetscFunctionReturn(0);
}
890 
/* Expand the user-selected points into their recursive cones with
   DMPlexGetConeRecursive(), view the resulting per-depth sections and ISs,
   and verify the reverse operation DMPlexExpandedConesToFaces_Private()
   reproduces the next-higher depth level. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS                is;
  PetscSection      *sects;
  IS                *iss;
  PetscInt          d,depth;
  PetscMPIInt       rank;
  PetscViewer       viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  /* optionally exercise the empty-IS corner case on rank 0 only */
  if (user->testExpandPointsEmpty && rank == 0) {
    CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  CHKERRQ(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  /* sub-viewer serializes the per-rank output blocks */
  CHKERRQ(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  CHKERRQ(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank));
  for (d=depth-1; d>=0; d--) {
    IS          checkIS;
    PetscBool   flg;

    CHKERRQ(PetscViewerASCIIPrintf(sviewer, "depth %D ---------------\n",d));
    CHKERRQ(PetscSectionView(sects[d], sviewer));
    CHKERRQ(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth-1) {
      CHKERRQ(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      CHKERRQ(ISEqualUnsorted(checkIS, iss[d+1], &flg));
      PetscCheckFalse(!flg,PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      CHKERRQ(ISDestroy(&checkIS));
    }
  }
  CHKERRQ(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  CHKERRQ(PetscViewerFlush(viewer));
  CHKERRQ(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  CHKERRQ(ISDestroy(&is));
  PetscFunctionReturn(0);
}
931 
/* Inverse of one level of cone expansion: given an IS of expanded cone points
   and the section describing each point's cone layout, recover for every
   section point the face covering its cone (via DMPlexGetJoin()).
   Output entries are: the covering point, or -1 if any cone entry was already
   negative (propagated failure), or -2 if no single covering point exists. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt          n,n1,ncone,numCoveredPoints,o,p,q,start,end;
  const PetscInt    *coveredPoints;
  const PetscInt    *arr, *cone;
  PetscInt          *newarr;

  PetscFunctionBegin;
  CHKERRQ(ISGetLocalSize(is, &n));
  CHKERRQ(PetscSectionGetStorageSize(section, &n1));
  CHKERRQ(PetscSectionGetChart(section, &start, &end));
  /* the IS must contain exactly the concatenated cones described by the section */
  PetscCheckFalse(n != n1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %D != %D = section storage size", n, n1);
  CHKERRQ(ISGetIndices(is, &arr));
  CHKERRQ(PetscMalloc1(end-start, &newarr));
  for (q=start; q<end; q++) {
    CHKERRQ(PetscSectionGetDof(section, q, &ncone));
    CHKERRQ(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* singleton cone: the point is its own "join" */
      numCoveredPoints = 1;
      p = cone[0];
    } else {
      PetscInt i;
      /* sentinel: stays PETSC_MAX_INT (>= 0) unless a negative cone entry is found */
      p = PETSC_MAX_INT;
      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = -1; break;}
      if (p >= 0) {
        CHKERRQ(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheckFalse(numCoveredPoints > 1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %D",q);
        if (numCoveredPoints) p = coveredPoints[0];
        else                  p = -2;
        CHKERRQ(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q-start] = p;
  }
  CHKERRQ(ISRestoreIndices(is, &arr));
  /* newis takes ownership of newarr */
  CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
971 
972 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
973 {
974   PetscInt          d;
975   IS                is,newis;
976 
977   PetscFunctionBegin;
978   is = boundary_expanded_is;
979   CHKERRQ(PetscObjectReference((PetscObject)is));
980   for (d = 0; d < depth-1; ++d) {
981     CHKERRQ(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
982     CHKERRQ(ISDestroy(&is));
983     is = newis;
984   }
985   *boundary_is = is;
986   PetscFunctionReturn(0);
987 }
988 
/* On error, reset the reentrance guard and deliberately swallow the error code
   (no return) so viewing failures cannot permanently disable the guard;
   used only by DMLabelViewFromOptionsOnComm_Private(). */
#define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; }
990 
991 static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
992 {
993   PetscViewer       viewer;
994   PetscBool         flg;
995   static PetscBool  incall = PETSC_FALSE;
996   PetscViewerFormat format;
997 
998   PetscFunctionBegin;
999   if (incall) PetscFunctionReturn(0);
1000   incall = PETSC_TRUE;
1001   CHKERRQI(incall,PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg));
1002   if (flg) {
1003     CHKERRQI(incall,PetscViewerPushFormat(viewer,format));
1004     CHKERRQI(incall,DMLabelView(label, viewer));
1005     CHKERRQI(incall,PetscViewerPopFormat(viewer));
1006     CHKERRQI(incall,PetscViewerDestroy(&viewer));
1007   }
1008   incall = PETSC_FALSE;
1009   PetscFunctionReturn(0);
1010 }
1011 
1012 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1013 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1014 {
1015   IS                tmpis;
1016 
1017   PetscFunctionBegin;
1018   CHKERRQ(DMLabelGetStratumIS(label, value, &tmpis));
1019   if (!tmpis) CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1020   CHKERRQ(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1021   CHKERRQ(ISDestroy(&tmpis));
1022   PetscFunctionReturn(0);
1023 }
1024 
/* currently only for simple PetscSection without fields or constraints */
/* Replicate the serial section sec0 (valid on rootrank only) onto every rank
   of comm by broadcasting its chart and per-point dof counts. */
static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
{
  PetscSection      sec;
  PetscInt          chart[2], p;
  PetscInt          *dofarr;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  /* only the root has a valid sec0; collect its chart there */
  if (rank == rootrank) {
    CHKERRQ(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
  }
  CHKERRMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
  CHKERRQ(PetscMalloc1(chart[1]-chart[0], &dofarr));
  if (rank == rootrank) {
    for (p = chart[0]; p < chart[1]; p++) {
      CHKERRQ(PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]));
    }
  }
  /* non-root ranks receive the dof counts filled in by the root */
  CHKERRMPI(MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm));
  /* rebuild an identical section locally on every rank */
  CHKERRQ(PetscSectionCreate(comm, &sec));
  CHKERRQ(PetscSectionSetChart(sec, chart[0], chart[1]));
  for (p = chart[0]; p < chart[1]; p++) {
    CHKERRQ(PetscSectionSetDof(sec, p, dofarr[p-chart[0]]));
  }
  CHKERRQ(PetscSectionSetUp(sec));
  CHKERRQ(PetscFree(dofarr));
  *secout = sec;
  PetscFunctionReturn(0);
}
1056 
1057 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1058 {
1059   IS                  faces_expanded_is;
1060 
1061   PetscFunctionBegin;
1062   CHKERRQ(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1063   CHKERRQ(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1064   CHKERRQ(ISDestroy(&faces_expanded_is));
1065   PetscFunctionReturn(0);
1066 }
1067 
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
/* Toggle the option for the given DM's options database/prefix.
   enable=PETSC_FALSE forces the option to 0; enable=PETSC_TRUE restores the
   user's original setting: "1" if it had been explicitly set true before
   (tracked in the static wasSetTrue), otherwise clears it back to the default. */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions      options = NULL;
  const char        *prefix = NULL;
  const char        opt[] = "-dm_plex_interpolate_orient_interfaces";
  char              prefix_opt[512];
  PetscBool         flg, set;
  static PetscBool  wasSetTrue = PETSC_FALSE;

  PetscFunctionBegin;
  if (dm) {
    CHKERRQ(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* build the fully-prefixed option name: "-" + prefix + option-name-without-dash */
  CHKERRQ(PetscStrcpy(prefix_opt, "-"));
  CHKERRQ(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  CHKERRQ(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  CHKERRQ(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* remember if the user had explicitly enabled it, then force-disable */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    CHKERRQ(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      CHKERRQ(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      CHKERRQ(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check: re-query the option and verify it now matches 'enable' */
    CHKERRQ(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheckFalse(set && flg != enable,PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1105 
/* get coordinate description of the whole-domain boundary */
/* Build a PortableBoundary (coordinates + per-depth cone sections) for the
   whole-domain boundary of a serial DM, replicated to all ranks of its
   communicator. Collective; the input DM must have all points on one rank. */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                     *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  CHKERRQ(PetscNew(&bnd));
  CHKERRQ(PetscObjectGetComm((PetscObject)dm, &comm));
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  CHKERRQ(DMPlexIsDistributed(dm, &flg));
  PetscCheckFalse(flg,comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  CHKERRQ(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    CHKERRQ(PetscObjectReference((PetscObject)dm));
  } else {
    CHKERRQ(DMPlexInterpolate(dm, &idm));
    CHKERRQ(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  CHKERRQ(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  CHKERRQ(DMAddLabel(idm, label));
  CHKERRQ(DMPlexMarkBoundaryFaces(idm, value, label));
  CHKERRQ(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  CHKERRQ(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  CHKERRQ(PetscNew(&bnd0));
  CHKERRQ(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    /* expand boundary faces down to vertices, then fetch those vertices' coordinates */
    CHKERRQ(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    /* NOTE(review): coordinates are taken from dm while the cone expansion and
       local setup use idm — presumably the vertex coordinates coincide; confirm */
    CHKERRQ(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      CHKERRQ(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      CHKERRQ(ISEqual(is0, boundary_is, &flg));
      PetscCheckFalse(!flg,PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      CHKERRQ(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks start with an empty coordinate vector */
    CHKERRQ(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec         tmp;
    VecScatter  sc;
    IS          xis;
    PetscInt    n;

    /* just convert seq vectors to mpi vector */
    CHKERRQ(VecGetLocalSize(bnd0->coordinates, &n));
    CHKERRMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      CHKERRQ(VecCreateMPI(comm, n, n, &tmp));
    } else {
      CHKERRQ(VecCreateMPI(comm, 0, n, &tmp));
    }
    CHKERRQ(VecCopy(bnd0->coordinates, tmp));
    CHKERRQ(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    /* every rank gets a full local copy: local size n, global size n*size */
    CHKERRQ(VecCreateMPI(comm, n, n*size, &bnd->coordinates));
    CHKERRQ(ISCreateStride(comm, n, 0, 1, &xis));
    CHKERRQ(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    CHKERRQ(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    CHKERRQ(VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    CHKERRQ(VecScatterDestroy(&sc));
    CHKERRQ(ISDestroy(&xis));
  }
  /* replicate depth and the per-depth sections from the root rank as well */
  bnd->depth = bnd0->depth;
  CHKERRMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  CHKERRQ(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d=0; d<bnd->depth; d++) {
    CHKERRQ(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
  }

  if (rank == rootrank) {
    CHKERRQ(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  }
  CHKERRQ(PortableBoundaryDestroy(&bnd0));
  CHKERRQ(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  CHKERRQ(DMLabelDestroy(&label));
  CHKERRQ(ISDestroy(&boundary_is));
  CHKERRQ(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1211 
/* get faces of inter-partition interface */
/* The interface faces are the partition-boundary faces of ipdm minus the
   whole-domain boundary faces (boundary_faces_is). Requires a fully
   interpolated DMPlex. */
static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
{
  MPI_Comm               comm;
  DMLabel                label;
  IS                     part_boundary_faces_is;
  const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)ipdm, &comm));
  CHKERRQ(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheckFalse(intp != DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* get ipdm partition boundary (partBoundary) */
  /* temporary label: attached to the DM only so DMPlexMarkBoundaryFaces() can use it */
  CHKERRQ(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
  CHKERRQ(DMAddLabel(ipdm, label));
  CHKERRQ(DMPlexMarkBoundaryFaces(ipdm, value, label));
  CHKERRQ(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
  CHKERRQ(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
  CHKERRQ(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  CHKERRQ(DMLabelDestroy(&label));

  /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
  CHKERRQ(ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is));
  CHKERRQ(ISDestroy(&part_boundary_faces_is));
  PetscFunctionReturn(0);
}
1241 
/* compute inter-partition interface including edges and vertices */
/* Complete the interface-face set downward in the Hasse diagram using
   DMPlexLabelComplete(), so the resulting IS also contains the interface
   edges and vertices. Requires a fully interpolated DMPlex. */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel                label;
  PetscInt               value = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)ipdm, &comm));
  CHKERRQ(DMPlexIsInterpolatedCollective(ipdm, &intp));
  PetscCheckFalse(intp != DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");

  /* temporary label seeded with the interface faces, then closed transitively */
  CHKERRQ(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
  CHKERRQ(DMAddLabel(ipdm, label));
  CHKERRQ(DMLabelSetStratumIS(label, value, interface_faces_is));
  CHKERRQ(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
  CHKERRQ(DMPlexLabelComplete(ipdm, label));
  CHKERRQ(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
  CHKERRQ(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
  CHKERRQ(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
  CHKERRQ(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
  CHKERRQ(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
  CHKERRQ(DMLabelDestroy(&label));
  PetscFunctionReturn(0);
}
1269 
1270 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1271 {
1272   PetscInt        n;
1273   const PetscInt  *arr;
1274 
1275   PetscFunctionBegin;
1276   CHKERRQ(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1277   CHKERRQ(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1282 {
1283   PetscInt        n;
1284   const PetscInt  *rootdegree;
1285   PetscInt        *arr;
1286 
1287   PetscFunctionBegin;
1288   CHKERRQ(PetscSFSetUp(sf));
1289   CHKERRQ(PetscSFComputeDegreeBegin(sf, &rootdegree));
1290   CHKERRQ(PetscSFComputeDegreeEnd(sf, &rootdegree));
1291   CHKERRQ(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1292   CHKERRQ(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1293   PetscFunctionReturn(0);
1294 }
1295 
1296 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1297 {
1298   IS pointSF_out_is, pointSF_in_is;
1299 
1300   PetscFunctionBegin;
1301   CHKERRQ(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1302   CHKERRQ(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1303   CHKERRQ(ISExpand(pointSF_out_is, pointSF_in_is, is));
1304   CHKERRQ(ISDestroy(&pointSF_out_is));
1305   CHKERRQ(ISDestroy(&pointSF_in_is));
1306   PetscFunctionReturn(0);
1307 }
1308 
/* If printing the diagnostics itself fails, abort with a generic "PointSF is wrong" error instead */
#define CHKERRMY(ierr) PetscCheck(!ierr,PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
1310 
/* Print each point of pointsIS with its type (vertex/edge/face/cell); vertices
   are printed with their coordinates. Recurses into each point's cone, with
   increasing indentation per recursion level. */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar     *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt  *points;

  PetscFunctionBegin;
  CHKERRQ(DMGetDimension(dm, &dim));
  CHKERRQ(DMGetCoordinateSection(dm, &coordsSection));
  CHKERRQ(DMGetCoordinatesLocal(dm, &coordsVec));
  CHKERRQ(VecGetArray(coordsVec, &coordsScalar));
  CHKERRQ(ISGetLocalSize(pointsIS, &npoints));
  CHKERRQ(ISGetIndices(pointsIS, &points));
  CHKERRQ(DMPlexGetDepthLabel(dm, &label));
  CHKERRQ(PetscViewerASCIIPushTab(v));
  for (i=0; i<npoints; i++) {
    p = points[i];
    /* depth 0 = vertex, 1 = edge, 2 = face, 3 = cell */
    CHKERRQ(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      PetscInt        n, o;
      char            coordstr[128];

      /* vertex: look up and format its coordinates from the coordinate section */
      CHKERRQ(PetscSectionGetDof(coordsSection, p, &n));
      CHKERRQ(PetscSectionGetOffset(coordsSection, p, &o));
      CHKERRQ(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      CHKERRQ(PetscViewerASCIISynchronizedPrintf(v, "vertex %D w/ coordinates %s\n", p, coordstr));
    } else {
      char            entityType[16];

      switch (depth) {
        case 1: CHKERRQ(PetscStrcpy(entityType, "edge")); break;
        case 2: CHKERRQ(PetscStrcpy(entityType, "face")); break;
        case 3: CHKERRQ(PetscStrcpy(entityType, "cell")); break;
        default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in dimension < 3, a point at depth == dim is simultaneously a cell */
      if (depth == dim && dim < 3) {
        CHKERRQ(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      }
      CHKERRQ(PetscViewerASCIISynchronizedPrintf(v, "%s %D\n", entityType, p));
    }
    CHKERRQ(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      const PetscInt *cone;
      IS             coneIS;

      /* recurse: view the cone points one indentation level deeper */
      CHKERRQ(DMPlexGetCone(dm, p, &cone));
      CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      CHKERRQ(ViewPointsWithType_Internal(dm, coneIS, v));
      CHKERRQ(ISDestroy(&coneIS));
    }
  }
  CHKERRQ(PetscViewerASCIIPopTab(v));
  CHKERRQ(VecRestoreArray(coordsVec, &coordsScalar));
  CHKERRQ(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1370 
/* Synchronized wrapper around ViewPointsWithType_Internal(): prints a per-rank
   header and the typed point listing. Only supports ASCII viewers; ranks with
   an empty IS print nothing. */
static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
{
  PetscBool       flg;
  PetscInt        npoints;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
  PetscCheckFalse(!flg,PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
  CHKERRQ(PetscViewerASCIIPushSynchronized(v));
  CHKERRQ(ISGetLocalSize(points, &npoints));
  if (npoints) {
    CHKERRQ(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
    CHKERRQ(ViewPointsWithType_Internal(dm, points, v));
  }
  CHKERRQ(PetscViewerFlush(v));
  CHKERRQ(PetscViewerASCIIPopSynchronized(v));
  PetscFunctionReturn(0);
}
1391 
/* Check that the points covered by the DM's PointSF (union of inward and
   outward points) are exactly the given inter-partition interface points;
   on mismatch, print the missing/extra points with their types and error out. */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF         pointsf;
  IS              pointsf_is;
  PetscBool       flg;
  MPI_Comm        comm;
  PetscMPIInt     size;

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)ipdm, &comm));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  CHKERRQ(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    CHKERRQ(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    /* no PointSF: the interface must be empty, otherwise the SF is wrong */
    PetscInt N=0;
    if (interface_is) CHKERRQ(ISGetSize(interface_is, &N));
    PetscCheckFalse(N,comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  CHKERRQ(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  CHKERRQ(ISEqual(interface_is, pointsf_is, &flg));
  /* the check must fail globally if it fails on any rank */
  CHKERRMPI(MPI_Allreduce(MPI_IN_PLACE,&flg,1,MPIU_BOOL,MPI_LAND,comm));
  if (!flg) {
    IS pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    /* CHKERRMY: if the diagnostics themselves fail, abort with a generic error */
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  CHKERRQ(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1438 
1439 /* remove faces & edges from label, leave just vertices */
1440 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1441 {
1442   PetscInt        vStart, vEnd;
1443   MPI_Comm        comm;
1444 
1445   PetscFunctionBegin;
1446   CHKERRQ(PetscObjectGetComm((PetscObject)dm, &comm));
1447   CHKERRQ(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1448   CHKERRQ(ISGeneralFilter(points, vStart, vEnd));
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 /*
1453   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1454 
1455   Collective
1456 
1457   Input Parameters:
1458 . dm - The DMPlex object
1459 
1460   Notes:
1461   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1462   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1463   This is mainly intended for debugging/testing purposes.
1464 
1465   Algorithm:
1466   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1467   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1468   3. the mesh is distributed or loaded in parallel
1469   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1470   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1471   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1472   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1473 
1474   Level: developer
1475 
1476 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1477 */
1478 static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1479 {
1480   DM                     ipdm=NULL;
1481   IS                     boundary_faces_is, interface_faces_is, interface_is;
1482   DMPlexInterpolatedFlag intp;
1483   MPI_Comm               comm;
1484 
1485   PetscFunctionBegin;
1486   CHKERRQ(PetscObjectGetComm((PetscObject)dm, &comm));
1487 
1488   CHKERRQ(DMPlexIsInterpolatedCollective(dm, &intp));
1489   if (intp == DMPLEX_INTERPOLATED_FULL) {
1490     ipdm = dm;
1491   } else {
1492     /* create temporary interpolated DM if input DM is not interpolated */
1493     CHKERRQ(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1494     CHKERRQ(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1495     CHKERRQ(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1496   }
1497   CHKERRQ(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1498 
1499   /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1500   CHKERRQ(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1501   /* get inter-partition interface faces (interface_faces_is)*/
1502   CHKERRQ(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1503   /* compute inter-partition interface including edges and vertices (interface_is) */
1504   CHKERRQ(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1505   /* destroy immediate ISs */
1506   CHKERRQ(ISDestroy(&boundary_faces_is));
1507   CHKERRQ(ISDestroy(&interface_faces_is));
1508 
1509   /* for uninterpolated case, keep just vertices in interface */
1510   if (!intp) {
1511     CHKERRQ(DMPlexISFilterVertices_Private(ipdm, interface_is));
1512     CHKERRQ(DMDestroy(&ipdm));
1513   }
1514 
1515   /* compare PointSF with the boundary reconstructed from coordinates */
1516   CHKERRQ(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1517   CHKERRQ(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1518   CHKERRQ(ISDestroy(&interface_is));
1519   PetscFunctionReturn(0);
1520 }
1521 
1522 int main(int argc, char **argv)
1523 {
1524   DM             dm;
1525   AppCtx         user;
1526   PetscErrorCode ierr;
1527 
1528   ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
1529   CHKERRQ(PetscLogStageRegister("create",&stage[0]));
1530   CHKERRQ(PetscLogStageRegister("distribute",&stage[1]));
1531   CHKERRQ(PetscLogStageRegister("interpolate",&stage[2]));
1532   CHKERRQ(ProcessOptions(PETSC_COMM_WORLD, &user));
1533   CHKERRQ(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1534   if (user.nPointsToExpand) {
1535     CHKERRQ(TestExpandPoints(dm, &user));
1536   }
1537   if (user.ncoords) {
1538     Vec coords;
1539 
1540     CHKERRQ(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1541     CHKERRQ(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1542     CHKERRQ(VecDestroy(&coords));
1543   }
1544   CHKERRQ(DMDestroy(&dm));
1545   ierr = PetscFinalize();
1546   return ierr;
1547 }
1548 
1549 /*TEST
1550 
1551   testset:
1552     nsize: 2
1553     args: -dm_view ascii::ascii_info_detail
1554     args: -dm_plex_check_all
1555     test:
1556       suffix: 1_tri_dist0
1557       args: -distribute 0 -interpolate {{none create}separate output}
1558     test:
1559       suffix: 1_tri_dist1
1560       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1561     test:
1562       suffix: 1_quad_dist0
1563       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1564     test:
1565       suffix: 1_quad_dist1
1566       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1567     test:
1568       suffix: 1_1d_dist1
1569       args: -dim 1 -distribute 1
1570 
1571   testset:
1572     nsize: 3
1573     args: -testnum 1 -interpolate create
1574     args: -dm_plex_check_all
1575     test:
1576       suffix: 2
1577       args: -dm_view ascii::ascii_info_detail
1578     test:
1579       suffix: 2a
1580       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1581     test:
1582       suffix: 2b
1583       args: -test_expand_points 0,1,2,5,6
1584     test:
1585       suffix: 2c
1586       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1587 
1588   testset:
    # the same as testset 1 but for 3D
1590     nsize: 2
1591     args: -dim 3 -dm_view ascii::ascii_info_detail
1592     args: -dm_plex_check_all
1593     test:
1594       suffix: 4_tet_dist0
1595       args: -distribute 0 -interpolate {{none create}separate output}
1596     test:
1597       suffix: 4_tet_dist1
1598       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1599     test:
1600       suffix: 4_hex_dist0
1601       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1602     test:
1603       suffix: 4_hex_dist1
1604       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1605 
1606   test:
1607     # the same as 4_tet_dist0 but test different initial orientations
1608     suffix: 4_tet_test_orient
1609     nsize: 2
1610     args: -dim 3 -distribute 0
1611     args: -dm_plex_check_all
1612     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1613     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1614 
1615   testset:
1616     requires: exodusii
1617     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1618     args: -dm_view ascii::ascii_info_detail
1619     args: -dm_plex_check_all
1620     args: -custom_view
1621     test:
1622       suffix: 5_seq
1623       nsize: 1
1624       args: -distribute 0 -interpolate {{none create}separate output}
1625     test:
1626       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1627       suffix: 5_dist0
1628       nsize: 2
1629       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1630     test:
1631       suffix: 5_dist1
1632       nsize: 2
1633       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1634 
1635   testset:
1636     nsize: {{1 2 4}}
1637     args: -use_generator
1638     args: -dm_plex_check_all
1639     args: -distribute -interpolate none
1640     test:
1641       suffix: 6_tri
1642       requires: triangle
1643       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1644     test:
1645       suffix: 6_quad
1646       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1647     test:
1648       suffix: 6_tet
1649       requires: ctetgen
1650       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1651     test:
1652       suffix: 6_hex
1653       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1654   testset:
1655     nsize: {{1 2 4}}
1656     args: -use_generator
1657     args: -dm_plex_check_all
1658     args: -distribute -interpolate create
1659     test:
1660       suffix: 6_int_tri
1661       requires: triangle
1662       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1663     test:
1664       suffix: 6_int_quad
1665       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1666     test:
1667       suffix: 6_int_tet
1668       requires: ctetgen
1669       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1670     test:
1671       suffix: 6_int_hex
1672       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1673   testset:
1674     nsize: {{2 4}}
1675     args: -use_generator
1676     args: -dm_plex_check_all
1677     args: -distribute -interpolate after_distribute
1678     test:
1679       suffix: 6_parint_tri
1680       requires: triangle
1681       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1682     test:
1683       suffix: 6_parint_quad
1684       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1685     test:
1686       suffix: 6_parint_tet
1687       requires: ctetgen
1688       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1689     test:
1690       suffix: 6_parint_hex
1691       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1692 
1693   testset: # 7 EXODUS
1694     requires: exodusii
1695     args: -dm_plex_check_all
1696     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1697     args: -distribute
1698     test: # seq load, simple partitioner
1699       suffix: 7_exo
1700       nsize: {{1 2 4 5}}
1701       args: -interpolate none
1702     test: # seq load, seq interpolation, simple partitioner
1703       suffix: 7_exo_int_simple
1704       nsize: {{1 2 4 5}}
1705       args: -interpolate create
1706     test: # seq load, seq interpolation, metis partitioner
1707       suffix: 7_exo_int_metis
1708       requires: parmetis
1709       nsize: {{2 4 5}}
1710       args: -interpolate create
1711       args: -petscpartitioner_type parmetis
1712     test: # seq load, simple partitioner, par interpolation
1713       suffix: 7_exo_simple_int
1714       nsize: {{2 4 5}}
1715       args: -interpolate after_distribute
1716     test: # seq load, metis partitioner, par interpolation
1717       suffix: 7_exo_metis_int
1718       requires: parmetis
1719       nsize: {{2 4 5}}
1720       args: -interpolate after_distribute
1721       args: -petscpartitioner_type parmetis
1722 
  testset: # 7 HDF5 SEQUENTIAL LOAD
1724     requires: hdf5 !complex
1725     args: -dm_plex_check_all
1726     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1727     args: -dm_plex_hdf5_force_sequential
1728     args: -distribute
1729     test: # seq load, simple partitioner
1730       suffix: 7_seq_hdf5_simple
1731       nsize: {{1 2 4 5}}
1732       args: -interpolate none
1733     test: # seq load, seq interpolation, simple partitioner
1734       suffix: 7_seq_hdf5_int_simple
1735       nsize: {{1 2 4 5}}
1736       args: -interpolate after_create
1737     test: # seq load, seq interpolation, metis partitioner
1738       nsize: {{2 4 5}}
1739       suffix: 7_seq_hdf5_int_metis
1740       requires: parmetis
1741       args: -interpolate after_create
1742       args: -petscpartitioner_type parmetis
1743     test: # seq load, simple partitioner, par interpolation
1744       suffix: 7_seq_hdf5_simple_int
1745       nsize: {{2 4 5}}
1746       args: -interpolate after_distribute
1747     test: # seq load, metis partitioner, par interpolation
1748       nsize: {{2 4 5}}
1749       suffix: 7_seq_hdf5_metis_int
1750       requires: parmetis
1751       args: -interpolate after_distribute
1752       args: -petscpartitioner_type parmetis
1753 
1754   testset: # 7 HDF5 PARALLEL LOAD
1755     requires: hdf5 !complex
1756     nsize: {{2 4 5}}
1757     args: -dm_plex_check_all
1758     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1759     test: # par load
1760       suffix: 7_par_hdf5
1761       args: -interpolate none
1762     test: # par load, par interpolation
1763       suffix: 7_par_hdf5_int
1764       args: -interpolate after_create
1765     test: # par load, parmetis repartitioner
1766       TODO: Parallel partitioning of uninterpolated meshes not supported
1767       suffix: 7_par_hdf5_parmetis
1768       requires: parmetis
1769       args: -distribute -petscpartitioner_type parmetis
1770       args: -interpolate none
1771     test: # par load, par interpolation, parmetis repartitioner
1772       suffix: 7_par_hdf5_int_parmetis
1773       requires: parmetis
1774       args: -distribute -petscpartitioner_type parmetis
1775       args: -interpolate after_create
1776     test: # par load, parmetis partitioner, par interpolation
1777       TODO: Parallel partitioning of uninterpolated meshes not supported
1778       suffix: 7_par_hdf5_parmetis_int
1779       requires: parmetis
1780       args: -distribute -petscpartitioner_type parmetis
1781       args: -interpolate after_distribute
1782 
1783     test:
1784       suffix: 7_hdf5_hierarch
1785       requires: hdf5 ptscotch !complex
1786       nsize: {{2 3 4}separate output}
1787       args: -distribute
1788       args: -interpolate after_create
1789       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1790       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1791       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1792 
1793   test:
1794     suffix: 8
1795     requires: hdf5 !complex
1796     nsize: 4
1797     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1798     args: -distribute 0 -interpolate after_create
1799     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1800     args: -dm_plex_check_all
1801     args: -custom_view
1802 
  testset: # 9 HDF5 SEQUENTIAL LOAD
1804     requires: hdf5 !complex datafilespath
1805     args: -dm_plex_check_all
1806     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1807     args: -dm_plex_hdf5_force_sequential
1808     args: -distribute
1809     test: # seq load, simple partitioner
1810       suffix: 9_seq_hdf5_simple
1811       nsize: {{1 2 4 5}}
1812       args: -interpolate none
1813     test: # seq load, seq interpolation, simple partitioner
1814       suffix: 9_seq_hdf5_int_simple
1815       nsize: {{1 2 4 5}}
1816       args: -interpolate after_create
1817     test: # seq load, seq interpolation, metis partitioner
1818       nsize: {{2 4 5}}
1819       suffix: 9_seq_hdf5_int_metis
1820       requires: parmetis
1821       args: -interpolate after_create
1822       args: -petscpartitioner_type parmetis
1823     test: # seq load, simple partitioner, par interpolation
1824       suffix: 9_seq_hdf5_simple_int
1825       nsize: {{2 4 5}}
1826       args: -interpolate after_distribute
1827     test: # seq load, simple partitioner, par interpolation
1828       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1829       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1830       # We can then provide an intentionally broken mesh instead.
1831       TODO: This test is broken because PointSF is fixed.
1832       suffix: 9_seq_hdf5_simple_int_err
1833       nsize: 4
1834       args: -interpolate after_distribute
1835       filter: sed -e "/PETSC ERROR/,$$d"
1836     test: # seq load, metis partitioner, par interpolation
1837       nsize: {{2 4 5}}
1838       suffix: 9_seq_hdf5_metis_int
1839       requires: parmetis
1840       args: -interpolate after_distribute
1841       args: -petscpartitioner_type parmetis
1842 
1843   testset: # 9 HDF5 PARALLEL LOAD
1844     requires: hdf5 !complex datafilespath
1845     nsize: {{2 4 5}}
1846     args: -dm_plex_check_all
1847     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1848     test: # par load
1849       suffix: 9_par_hdf5
1850       args: -interpolate none
1851     test: # par load, par interpolation
1852       suffix: 9_par_hdf5_int
1853       args: -interpolate after_create
1854     test: # par load, parmetis repartitioner
1855       TODO: Parallel partitioning of uninterpolated meshes not supported
1856       suffix: 9_par_hdf5_parmetis
1857       requires: parmetis
1858       args: -distribute -petscpartitioner_type parmetis
1859       args: -interpolate none
1860     test: # par load, par interpolation, parmetis repartitioner
1861       suffix: 9_par_hdf5_int_parmetis
1862       requires: parmetis
1863       args: -distribute -petscpartitioner_type parmetis
1864       args: -interpolate after_create
1865     test: # par load, parmetis partitioner, par interpolation
1866       TODO: Parallel partitioning of uninterpolated meshes not supported
1867       suffix: 9_par_hdf5_parmetis_int
1868       requires: parmetis
1869       args: -distribute -petscpartitioner_type parmetis
1870       args: -interpolate after_distribute
1871 
1872   testset: # 10 HDF5 PARALLEL LOAD
1873     requires: hdf5 !complex datafilespath
1874     nsize: {{2 4 7}}
1875     args: -dm_plex_check_all
1876     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1877     test: # par load, par interpolation
1878       suffix: 10_par_hdf5_int
1879       args: -interpolate after_create
1880 TEST*/
1881