xref: /petsc/src/dm/impls/plex/tests/ex18.c (revision ffa8c5705e8ab2cf85ee1d14dbe507a6e2eb5283)
/* Help text printed by -help; the individual test meshes are documented in the large comment below. */
static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
2 
3 #include <petsc/private/dmpleximpl.h>
4 /* List of test meshes
5 
6 Network
7 -------
8 Test 0 (2 ranks):
9 
10 network=0:
11 ---------
12   cell 0   cell 1   cell 2          nCells-1       (edge)
13 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells (vertex)
14 
15   vertex distribution:
16     rank 0: 0 1
17     rank 1: 2 3 ... nCells
18   cell(edge) distribution:
19     rank 0: 0 1
20     rank 1: 2 ... nCells-1
21 
22 network=1:
23 ---------
24                v2
25                 ^
26                 |
27                cell 2
28                 |
29  v0 --cell 0--> v3--cell 1--> v1
30 
31   vertex distribution:
32     rank 0: 0 1 3
33     rank 1: 2
34   cell(edge) distribution:
35     rank 0: 0 1
36     rank 1: 2
37 
38   example:
39     mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
40 
41 Triangle
42 --------
43 Test 0 (2 ranks):
44 Two triangles sharing a face
45 
46         2
47       / | \
48      /  |  \
49     /   |   \
50    0  0 | 1  3
51     \   |   /
52      \  |  /
53       \ | /
54         1
55 
56   vertex distribution:
57     rank 0: 0 1
58     rank 1: 2 3
59   cell distribution:
60     rank 0: 0
61     rank 1: 1
62 
63 Test 1 (3 ranks):
64 Four triangles partitioned across 3 ranks
65 
66    0 _______ 3
67    | \     / |
68    |  \ 1 /  |
69    |   \ /   |
70    | 0  2  2 |
71    |   / \   |
72    |  / 3 \  |
73    | /     \ |
74    1 ------- 4
75 
76   vertex distribution:
77     rank 0: 0 1
78     rank 1: 2 3
79     rank 2: 4
80   cell distribution:
81     rank 0: 0
82     rank 1: 1
83     rank 2: 2 3
84 
85 Test 2 (3 ranks):
86 Four triangles partitioned across 3 ranks
87 
88    1 _______ 3
89    | \     / |
90    |  \ 1 /  |
91    |   \ /   |
92    | 0  0  2 |
93    |   / \   |
94    |  / 3 \  |
95    | /     \ |
96    2 ------- 4
97 
98   vertex distribution:
99     rank 0: 0 1
100     rank 1: 2 3
101     rank 2: 4
102   cell distribution:
103     rank 0: 0
104     rank 1: 1
105     rank 2: 2 3
106 
107 Tetrahedron
108 -----------
109 Test 0:
110 Two tets sharing a face
111 
112  cell   3 _______    cell
113  0    / | \      \   1
114      /  |  \      \
115     /   |   \      \
116    0----|----4-----2
117     \   |   /      /
118      \  |  /      /
119       \ | /      /
120         1-------
121    y
122    | x
123    |/
124    *----z
125 
126   vertex distribution:
127     rank 0: 0 1
128     rank 1: 2 3 4
129   cell distribution:
130     rank 0: 0
131     rank 1: 1
132 
133 Quadrilateral
134 -------------
135 Test 0 (2 ranks):
136 Two quads sharing a face
137 
138    3-------2-------5
139    |       |       |
140    |   0   |   1   |
141    |       |       |
142    0-------1-------4
143 
144   vertex distribution:
145     rank 0: 0 1 2
146     rank 1: 3 4 5
147   cell distribution:
148     rank 0: 0
149     rank 1: 1
150 
151 TODO Test 1:
152 A quad and a triangle sharing a face
153 
154    5-------4
155    |       | \
156    |   0   |  \
157    |       | 1 \
158    2-------3----6
159 
160 Hexahedron
161 ----------
162 Test 0 (2 ranks):
163 Two hexes sharing a face
164 
165 cell   7-------------6-------------11 cell
166 0     /|            /|            /|     1
167      / |   F1      / |   F7      / |
168     /  |          /  |          /  |
169    4-------------5-------------10  |
170    |   |     F4  |   |     F10 |   |
171    |   |         |   |         |   |
172    |F5 |         |F3 |         |F9 |
173    |   |  F2     |   |   F8    |   |
174    |   3---------|---2---------|---9
175    |  /          |  /          |  /
176    | /   F0      | /    F6     | /
177    |/            |/            |/
178    0-------------1-------------8
179 
180   vertex distribution:
181     rank 0: 0 1 2 3 4 5
182     rank 1: 6 7 8 9 10 11
183   cell distribution:
184     rank 0: 0
185     rank 1: 1
186 
187 */
188 
/* When (if at all) the mesh gets topologically interpolated: never, during creation,
   after creation (serial), or after DMPlexDistribute() (parallel). */
typedef enum {NONE, CREATE, AFTER_CREATE, AFTER_DISTRIBUTE} InterpType;
190 
/* Command-line configuration for this test; filled by ProcessOptions(). */
typedef struct {
  PetscInt   debug;                        /* The debugging level */
  PetscInt   testNum;                      /* Indicates the mesh to create */
  PetscInt   dim;                          /* The topological mesh dimension */
  PetscBool  cellSimplex;                  /* Use simplices or hexes */
  PetscBool  distribute;                   /* Distribute the mesh */
  InterpType interpolate;                  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool  useGenerator;                 /* Construct mesh with a mesh generator */
  PetscBool  testOrientIF;                 /* Test for different original interface orientations */
  PetscBool  testHeavy;                    /* Run the heavy PointSF test */
  PetscBool  customView;                   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt   ornt[2];                      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt   faces[3];                     /* Number of faces per dimension for generator */
  PetscScalar coords[128];                 /* Vertex coordinates queried via -view_vertices_from_coords */
  PetscReal  coordsTol;                    /* Tolerance for matching coords[] against mesh vertices */
  PetscInt   ncoords;                      /* Number of valid entries in coords[] (capacity 128 on input) */
  PetscInt   pointsToExpand[128];          /* DAG points passed to the -test_expand_points cone-expansion test */
  PetscInt   nPointsToExpand;              /* Number of valid entries in pointsToExpand[] */
  PetscBool  testExpandPointsEmpty;        /* If true, rank 0 passes an empty array to the expansion test */
  char       filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
212 
/* Rank-independent ("portable") description of a mesh boundary, used by the heavy PointSF test:
   boundary vertex coordinates plus one section per depth level (sections[0..depth-1]). */
struct _n_PortableBoundary {
  Vec coordinates;        /* coordinates of boundary vertices */
  PetscInt depth;         /* number of entries in sections[] */
  PetscSection *sections; /* allocated array of depth sections; owned by this object */
};
typedef struct _n_PortableBoundary * PortableBoundary;
219 
#if defined(PETSC_USE_LOG)
/* Profiling stages: stage[0] wraps mesh creation, stage[2] wraps interpolation (see CreateMesh);
   stage[1] is presumably the distribution stage -- its use is not visible in this part of the file. */
static PetscLogStage  stage[3];
#endif

/* Forward declarations for the heavy PointSF test machinery defined later in this file. */
static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
static PetscErrorCode DMPlexSetOrientInterface_Private(DM,PetscBool);
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
228 
229 static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
230 {
231   PetscInt       d;
232 
233   PetscFunctionBegin;
234   if (!*bnd) PetscFunctionReturn(0);
235   PetscCall(VecDestroy(&(*bnd)->coordinates));
236   for (d=0; d < (*bnd)->depth; d++) {
237     PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
238   }
239   PetscCall(PetscFree((*bnd)->sections));
240   PetscCall(PetscFree(*bnd));
241   PetscFunctionReturn(0);
242 }
243 
/*
  Fill the AppCtx with defaults, then parse command-line options.

  Also enforces option consistency:
  - -interpolate after_distribute requires -distribute 1;
  - the length of -faces must agree with -dim when both are given;
  - -rotate_interface_0 and -rotate_interface_1 must be given together, and when
    set they force a fixed 3D two-tet serial configuration (see bottom of function).
*/
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *interpTypes[4]  = {"none", "create", "after_create", "after_distribute"};
  PetscInt       interp=NONE, dim;
  PetscBool      flg1, flg2;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Defaults; every field of AppCtx is initialized here before parsing. */
  options->debug        = 0;
  options->testNum      = 0;
  options->dim          = 2;
  options->cellSimplex  = PETSC_TRUE;
  options->distribute   = PETSC_FALSE;
  options->interpolate  = NONE;
  options->useGenerator = PETSC_FALSE;
  options->testOrientIF = PETSC_FALSE;
  options->testHeavy    = PETSC_TRUE;
  options->customView   = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]      = 0;
  options->ornt[1]      = 0;
  options->faces[0]     = 2;
  options->faces[1]     = 2;
  options->faces[2]     = 2;
  options->filename[0]  = '\0';
  options->coordsTol    = PETSC_DEFAULT;

  ierr = PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");PetscCall(ierr);
  PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL,0));
  PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL,0));
  PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
  /* -interpolate is parsed into a PetscInt and cast back to the enum. */
  PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
  options->interpolate = (InterpType) interp;
  PetscCheckFalse(!options->distribute && options->interpolate == AFTER_DISTRIBUTE,comm, PETSC_ERR_SUP, "-interpolate after_distribute  needs  -distribute 1");
  PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
  /* ncoords is the capacity of coords[] on input; the option call overwrites it with the count actually read. */
  options->ncoords = 128;
  PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
  PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
  /* Same in/out-capacity pattern for the point-expansion test array. */
  options->nPointsToExpand = 128;
  PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
  if (options->nPointsToExpand) {
    PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
  }
  PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
  PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
  PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1,1,3));
  /* dim is the capacity of faces[] on input; on output it is the number of values given to -faces. */
  dim = 3;
  PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
  if (flg2) {
    /* If -dim was also given explicitly (flg1), it must match the -faces length. */
    PetscCheckFalse(flg1 && dim != options->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %D is not equal to length %D of -faces (note that -dim can be omitted)", options->dim, dim);
    options->dim = dim;
  }
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
  /* The two rotate options must be given as a pair; testOrientIF / flg2 record whether each was set. */
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF,0));
  PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2,0));
  PetscCheckFalse(flg2 != options->testOrientIF,comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
  if (options->testOrientIF) {
    PetscInt i;
    for (i=0; i<2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i]-10);  /* 11 12 13 become -1 -2 -3 */
    }
    /* Orientation testing only works for the hard-coded serial 3D two-tet mesh; force that configuration. */
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  ierr = PetscOptionsEnd();PetscCall(ierr);
  PetscFunctionReturn(0);
}
316 
317 static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
318 {
319   PetscInt       testNum = user->testNum;
320   PetscMPIInt    rank,size;
321   PetscInt       numCorners=2,i;
322   PetscInt       numCells,numVertices,network;
323   PetscInt       *cells;
324   PetscReal      *coords;
325 
326   PetscFunctionBegin;
327   PetscCallMPI(MPI_Comm_rank(comm, &rank));
328   PetscCallMPI(MPI_Comm_size(comm, &size));
329   PetscCheckFalse(size > 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for <=2 processes",testNum);
330 
331   numCells = 3;
332   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
333   PetscCheckFalse(numCells < 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells must >=3",numCells);
334 
335   if (size == 1) {
336     numVertices = numCells + 1;
337     PetscCall(PetscMalloc2(2*numCells,&cells,2*numVertices,&coords));
338     for (i=0; i<numCells; i++) {
339       cells[2*i] = i; cells[2*i+1] = i + 1;
340       coords[2*i] = i; coords[2*i+1] = i + 1;
341     }
342 
343     PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
344     PetscCall(PetscFree2(cells,coords));
345     PetscFunctionReturn(0);
346   }
347 
348   network = 0;
349   PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
350   if (network == 0) {
351     switch (rank) {
352     case 0:
353     {
354       numCells    = 2;
355       numVertices = numCells;
356       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
357       cells[0] = 0; cells[1] = 1;
358       cells[2] = 1; cells[3] = 2;
359       coords[0] = 0.; coords[1] = 1.;
360       coords[2] = 1.; coords[3] = 2.;
361     }
362     break;
363     case 1:
364     {
365       numCells    -= 2;
366       numVertices = numCells + 1;
367       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
368       for (i=0; i<numCells; i++) {
369         cells[2*i] = 2+i; cells[2*i+1] = 2 + i + 1;
370         coords[2*i] = 2+i; coords[2*i+1] = 2 + i + 1;
371       }
372     }
373     break;
374     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
375     }
376   } else { /* network_case = 1 */
377     /* ----------------------- */
378     switch (rank) {
379     case 0:
380     {
381       numCells    = 2;
382       numVertices = 3;
383       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
384       cells[0] = 0; cells[1] = 3;
385       cells[2] = 3; cells[3] = 1;
386     }
387     break;
388     case 1:
389     {
390       numCells    = 1;
391       numVertices = 1;
392       PetscCall(PetscMalloc2(2*numCells,&cells,2*numCells,&coords));
393       cells[0] = 3; cells[1] = 2;
394     }
395     break;
396     default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
397     }
398   }
399   PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
400   PetscCall(PetscFree2(cells,coords));
401   PetscFunctionReturn(0);
402 }
403 
/*
  Create one of the hard-coded 2D triangle test meshes (see the diagrams in the
  file header): testNum 0 is two triangles on 2 ranks sharing a face; testNum 1
  and 2 are four triangles on 3 ranks with different local numberings.
  Cell connectivity and coordinates are given per rank; each rank also sets the
  "marker" label on a few points.
*/
static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    /* Two triangles sharing the face 1-2 (file header, "Triangle Test 0"). */
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {-0.5, 0.5, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 3, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.5, 0.5};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:
    /* Four triangles on 3 ranks (file header, "Triangle Test 1"); rank 2 owns two cells. */
    PetscCheckFalse(size != 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 1, 2};
        PetscReal      coords[4] = {0.0, 1.0, 0.0, 0.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {0, 2, 3};
        PetscReal      coords[4] = {0.5, 0.5, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {2, 4, 3, 2, 1, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): markerPoints holds 5 pairs but only the first 3 are applied -- confirm the bound is intentional. */
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:
    /* Same four-triangle mesh with a different local vertex numbering (file header, "Triangle Test 2"). */
    PetscCheckFalse(size != 3,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 3 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 2, 0};
        PetscReal      coords[4] = {0.5, 0.5, 0.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 3;
        const PetscInt cells[3]  = {1, 0, 3};
        PetscReal      coords[4] = {0.0, 0.0, 1.0, 1.0};
        PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 2:
      {
        const PetscInt numCells  = 2, numVertices = 1, numCorners = 3;
        const PetscInt cells[6]  = {0, 4, 3, 0, 2, 4};
        PetscReal      coords[2] = {1.0, 0.0};
        PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        /* NOTE(review): markerPoints holds 5 pairs but only the first 3 are applied -- confirm the bound is intentional. */
        for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  PetscFunctionReturn(0);
}
523 
/*
  Create the hard-coded 3D test mesh: two tetrahedra on 2 ranks sharing a face
  (file header, "Tetrahedron Test 0").  If -rotate_interface_* was given
  (user->testOrientIF), additionally rotate the shared interface face on each
  rank by the requested orientation and verify that interface orientation is
  repaired by DMPlexOrientInterface_Internal().
*/
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 2, numCorners = 4;
        const PetscInt cells[4]  = {0, 2, 1, 3};
        PetscReal      coords[6] = {0.0, 0.0, -0.5,  0.0, -0.5, 0.0};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {1, 2, 4, 3};
        PetscReal      coords[9] = {1.0, 0.0, 0.0,  0.0, 0.5, 0.0,  0.0, 0.0, 0.5};
        PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  if (user->testOrientIF) {
    /* DAG point of the interface face on rank 0 resp. rank 1 -- presumably valid
       only for the interpolated two-tet mesh above; confirm if the mesh changes. */
    PetscInt ifp[] = {8, 6};

    PetscCall(PetscObjectSetName((PetscObject) *dm, "Mesh before orientation"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
    PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
    PetscCall(DMPlexCheckFaces(*dm, 0));
    PetscCall(DMPlexOrientInterface_Internal(*dm));
    PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
  }
  PetscFunctionReturn(0);
}
577 
/*
  Create the hard-coded 2D quadrilateral test mesh: two quads on 2 ranks
  sharing the face 1-2 (file header, "Quadrilateral Test 0").
*/
static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
      case 0:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {0, 1, 2, 3};
        PetscReal      coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
        PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      case 1:
      {
        const PetscInt numCells  = 1, numVertices = 3, numCorners = 4;
        const PetscInt cells[4]  = {1, 4, 5, 2};
        PetscReal      coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
        PetscInt       markerPoints[4*2] = {1, 1, 2, 1, 3, 1, 4, 1};

        PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
        for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
      }
      break;
      default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  PetscFunctionReturn(0);
}
619 
/*
  Create the hard-coded 3D hexahedral test mesh: two hexes on 2 ranks sharing
  a face (file header, "Hexahedron Test 0").
*/
static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt       testNum = user->testNum, p;
  PetscMPIInt    rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  switch (testNum) {
  case 0:
    PetscCheckFalse(size != 2,comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %d only for 2 processes", testNum);
    switch (rank) {
    case 0:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6*3] = {-0.5,0.0,0.0, 0.0,0.0,0.0, 0.0,1.0,0.0, -0.5,1.0,0.0, -0.5,0.0,1.0, 0.0,0.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): markerPoints holds 8 pairs but only the first 4 are applied -- confirm whether the bound should be 8. */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    case 1:
    {
      const PetscInt numCells  = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]  = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6*3] = {0.0,1.0,1.0, -0.5,1.0,1.0, 0.5,0.0,0.0, 0.5,1.0,0.0, 0.5,0.0,1.0,  0.5,1.0,1.0};
      PetscInt       markerPoints[8*2] = {2,1,3,1,4,1,5,1,6,1,7,1,8,1,9,1};

      PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
      /* NOTE(review): markerPoints holds 8 pairs but only the first 4 are applied -- confirm whether the bound should be 8. */
      for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p*2], markerPoints[p*2+1]));
    }
    break;
    default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  break;
  default: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %D", testNum);
  }
  PetscFunctionReturn(0);
}
661 
662 static PetscErrorCode CustomView(DM dm, PetscViewer v)
663 {
664   DMPlexInterpolatedFlag interpolated;
665   PetscBool              distributed;
666 
667   PetscFunctionBegin;
668   PetscCall(DMPlexIsDistributed(dm, &distributed));
669   PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
670   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
671   PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
672   PetscFunctionReturn(0);
673 }
674 
/*
  Load the mesh from user->filename into *dm and report whether the loader
  produced a distributed or serial mesh.  When the heavy PointSF test is
  requested and the load came back distributed, additionally load a serial copy
  into *serialDM (forcing sequential HDF5 load) for later comparison.
  Updates user->dim from the loaded mesh.  *serialDM is NULL if no serial copy
  was needed.
*/
static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char    *filename       = user->filename;
  PetscBool      testHeavy      = user->testHeavy;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      distributed    = PETSC_FALSE;

  PetscFunctionBegin;
  *serialDM = NULL;
  /* Temporarily disable interface orientation during load so that any PointSF
     issues are left for DMPlexCheckPointSFHeavy() to detect; re-enabled below. */
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
  PetscCall(PetscLogStagePush(stage[0]));
  PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscCall(PetscLogStagePop());
  if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
  PetscCall(DMPlexIsDistributed(*dm, &distributed));
  PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
  if (testHeavy && distributed) {
    /* Force a sequential (serial) load of the same file for the heavy test's reference mesh. */
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
    PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
    PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
    PetscCheck(!distributed,comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
  }
  PetscCall(DMGetDimension(*dm, &user->dim));
  PetscFunctionReturn(0);
}
700 
/* Create the test mesh according to user options: from a file, from the box mesh
   generator, or from one of the hard-coded test meshes described in the file header;
   then optionally interpolate (before and/or after distribution), distribute it,
   and run the heavy PointSF checks when requested. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscPartitioner part;
  PortableBoundary boundary     = NULL;  /* coordinate-based description of the whole-domain boundary (testHeavy only) */
  DM             serialDM       = NULL;  /* serial copy of the mesh, kept for boundary extraction (testHeavy only) */
  PetscBool      cellSimplex    = user->cellSimplex;
  PetscBool      useGenerator   = user->useGenerator;
  PetscBool      interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool      testHeavy      = user->testHeavy;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Phase 1: create the initial mesh (file > generator > hard-coded test meshes) */
  if (user->filename[0]) {
    PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
  } else if (useGenerator) {
    PetscCall(PetscLogStagePush(stage[0]));
    PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm));
    PetscCall(PetscLogStagePop());
  } else {
    PetscCall(PetscLogStagePush(stage[0]));
    switch (user->dim) {
    case 1:
      PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
      break;
    case 2:
      if (cellSimplex) {
        PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
      }
      break;
    case 3:
      if (cellSimplex) {
        PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
      } else {
        PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
      }
      break;
    default:
      SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %D", user->dim);
    }
    PetscCall(PetscLogStagePop());
  }
  /* the flat coordinates array must hold whole dim-tuples */
  PetscCheckFalse(user->ncoords % user->dim,comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %D must be divisable by spatial dimension %D", user->ncoords, user->dim);
  PetscCall(PetscObjectSetName((PetscObject) *dm, "Original Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));

  /* Phase 2: optional serial interpolation (before distribution) */
  if (interpSerial) {
    DM idm;

    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
    PetscCall(PetscLogStagePush(stage[2]));
    PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscCall(PetscLogStagePop());
    if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
    PetscCall(DMDestroy(dm));
    *dm = idm;
    PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Mesh"));
    PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
  }

  /* Set partitioner options */
  PetscCall(DMPlexGetPartitioner(*dm, &part));
  if (part) {
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
  }

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  /* Phase 3: heavy PointSF check on the pre-distribution mesh */
  if (testHeavy) {
    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(*dm, &distributed));
    if (!serialDM && !distributed) {
      /* the mesh itself is serial; use it directly for boundary extraction */
      serialDM = *dm;
      PetscCall(PetscObjectReference((PetscObject)*dm));
    }
    if (serialDM) {
      PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
    }
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }
  /* Phase 4: distribution and optional parallel interpolation */
  if (user->distribute) {
    DM               pdm = NULL;

    /* Redistribute mesh over processes using that partitioner */
    PetscCall(PetscLogStagePush(stage[1]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
    PetscCall(PetscLogStagePop());
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm  = pdm;
      PetscCall(PetscObjectSetName((PetscObject) *dm, "Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
    }

    if (interpParallel) {
      DM idm;

      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
      PetscCall(PetscLogStagePush(stage[2]));
      PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscCall(PetscLogStagePop());
      if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
      PetscCall(DMDestroy(dm));
      *dm = idm;
      PetscCall(PetscObjectSetName((PetscObject) *dm, "Interpolated Redistributed Mesh"));
      PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
    }
  }
  /* Phase 5: heavy PointSF check on the final (possibly distributed/interpolated) mesh */
  if (testHeavy) {
    if (boundary) {
      PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    PetscCall(DMPlexOrientInterface_Internal(*dm));
  }

  PetscCall(PetscObjectSetName((PetscObject) *dm, "Parallel Mesh"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));

  if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
  PetscCall(DMDestroy(&serialDM));
  PetscCall(PortableBoundaryDestroy(&boundary));
  PetscFunctionReturn(0);
}
837 
838 #define ps2d(number) ((double) PetscRealPart(number))
839 static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
840 {
841   PetscFunctionBegin;
842   PetscCheckFalse(dim > 3,PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal 3");
843   if (tol >= 1e-3) {
844     switch (dim) {
845       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.3f)",ps2d(coords[0])));
846       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1])));
847       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.3f, %12.3f, %12.3f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
848     }
849   } else {
850     switch (dim) {
851       case 1: PetscCall(PetscSNPrintf(buf,len,"(%12.6f)",ps2d(coords[0])));
852       case 2: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1])));
853       case 3: PetscCall(PetscSNPrintf(buf,len,"(%12.6f, %12.6f, %12.6f)",ps2d(coords[0]),ps2d(coords[1]),ps2d(coords[2])));
854     }
855   }
856   PetscFunctionReturn(0);
857 }
858 
859 static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
860 {
861   PetscInt       dim, i, npoints;
862   IS             pointsIS;
863   const PetscInt *points;
864   const PetscScalar *coords;
865   char           coordstr[128];
866   MPI_Comm       comm;
867   PetscMPIInt    rank;
868 
869   PetscFunctionBegin;
870   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
871   PetscCallMPI(MPI_Comm_rank(comm, &rank));
872   PetscCall(DMGetDimension(dm, &dim));
873   PetscCall(PetscViewerASCIIPushSynchronized(viewer));
874   PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
875   PetscCall(ISGetIndices(pointsIS, &points));
876   PetscCall(ISGetLocalSize(pointsIS, &npoints));
877   PetscCall(VecGetArrayRead(coordsVec, &coords));
878   for (i=0; i < npoints; i++) {
879     PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i*dim], tol));
880     if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
881     PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%D] = %D\n", rank, coordstr, i, points[i]));
882     PetscCall(PetscViewerFlush(viewer));
883   }
884   PetscCall(PetscViewerASCIIPopSynchronized(viewer));
885   PetscCall(VecRestoreArrayRead(coordsVec, &coords));
886   PetscCall(ISRestoreIndices(pointsIS, &points));
887   PetscCall(ISDestroy(&pointsIS));
888   PetscFunctionReturn(0);
889 }
890 
/* Expand the user-given points into recursive cones with DMPlexGetConeRecursive(),
   view the section and IS of every depth level per rank, and verify that the
   inverse operation (DMPlexExpandedConesToFaces_Private) reproduces the level above. */
static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS                is;
  PetscSection      *sects;
  IS                *iss;
  PetscInt          d,depth;
  PetscMPIInt       rank;
  PetscViewer       viewer=PETSC_VIEWER_STDOUT_WORLD, sviewer;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank));
  /* optionally test the empty-input edge case on rank 0 */
  if (user->testExpandPointsEmpty && rank == 0) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
  } else {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
  }
  PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
  /* per-rank sequential output through a subviewer */
  PetscCall(PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n",rank));
  for (d=depth-1; d>=0; d--) {
    IS          checkIS;
    PetscBool   flg;

    PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %D ---------------\n",d));
    PetscCall(PetscSectionView(sects[d], sviewer));
    PetscCall(ISView(iss[d], sviewer));
    /* check reverse operation */
    if (d < depth-1) {
      PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
      PetscCall(ISEqualUnsorted(checkIS, iss[d+1], &flg));
      PetscCheck(flg,PetscObjectComm((PetscObject) checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
      PetscCall(ISDestroy(&checkIS));
    }
  }
  PetscCall(PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer));
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(0);
}
931 
/* Contract each cone recorded in section (a slice of the expanded point array in is)
   back into the single point it covers, using DMPlexGetJoin().
   Output sentinels: -1 if the cone already contains an invalid (negative) point,
   -2 if the join of the cone points is empty. */
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt          n,n1,ncone,numCoveredPoints,o,p,q,start,end;
  const PetscInt    *coveredPoints;
  const PetscInt    *arr, *cone;
  PetscInt          *newarr;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscSectionGetStorageSize(section, &n1));
  PetscCall(PetscSectionGetChart(section, &start, &end));
  /* section must describe exactly the expanded IS */
  PetscCheckFalse(n != n1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %D != %D = section storage size", n, n1);
  PetscCall(ISGetIndices(is, &arr));
  PetscCall(PetscMalloc1(end-start, &newarr));
  for (q=start; q<end; q++) {
    PetscCall(PetscSectionGetDof(section, q, &ncone));
    PetscCall(PetscSectionGetOffset(section, q, &o));
    cone = &arr[o];
    if (ncone == 1) {
      /* a singleton cone is its own join */
      numCoveredPoints = 1;
      p = cone[0];
    } else {
      PetscInt i;
      p = PETSC_MAX_INT;
      /* propagate invalidity: any negative cone point marks the result invalid (-1) */
      for (i=0; i<ncone; i++) if (cone[i] < 0) {p = -1; break;}
      if (p >= 0) {
        PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
        PetscCheckFalse(numCoveredPoints > 1,PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered points for section point %D",q);
        if (numCoveredPoints) p = coveredPoints[0];
        else                  p = -2;  /* no covering point exists */
        PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
      }
    }
    newarr[q-start] = p;
  }
  PetscCall(ISRestoreIndices(is, &arr));
  /* newis takes ownership of newarr */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end-start, newarr, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(0);
}
971 
972 static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
973 {
974   PetscInt          d;
975   IS                is,newis;
976 
977   PetscFunctionBegin;
978   is = boundary_expanded_is;
979   PetscCall(PetscObjectReference((PetscObject)is));
980   for (d = 0; d < depth-1; ++d) {
981     PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
982     PetscCall(ISDestroy(&is));
983     is = newis;
984   }
985   *boundary_is = is;
986   PetscFunctionReturn(0);
987 }
988 
/* Best-effort error handling for the optional viewing below: on failure, just drop
   the recursion guard (incall) and continue; the error code is deliberately discarded
   so that a failed view does not abort the whole test run. */
#define CHKERRQI(incall,ierr) if (ierr) {incall = PETSC_FALSE; }
990 
/* View label if the option optionname is set, using a viewer created on the given comm
   (rather than the label's own comm). A static flag guards against recursive invocation;
   CHKERRQI drops that guard (and swallows the error) if any viewer call fails. */
static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;  /* recursion guard shared by all calls */
  PetscViewerFormat format;

  PetscFunctionBegin;
  if (incall) PetscFunctionReturn(0);
  incall = PETSC_TRUE;
  CHKERRQI(incall,PetscOptionsGetViewer(comm,((PetscObject)label)->options,((PetscObject)label)->prefix,optionname,&viewer,&format,&flg));
  if (flg) {
    CHKERRQI(incall,PetscViewerPushFormat(viewer,format));
    CHKERRQI(incall,DMLabelView(label, viewer));
    CHKERRQI(incall,PetscViewerPopFormat(viewer));
    CHKERRQI(incall,PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  PetscFunctionReturn(0);
}
1011 
1012 /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1013 static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1014 {
1015   IS                tmpis;
1016 
1017   PetscFunctionBegin;
1018   PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1019   if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1020   PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1021   PetscCall(ISDestroy(&tmpis));
1022   PetscFunctionReturn(0);
1023 }
1024 
1025 /* currently only for simple PetscSection without fields or constraints */
1026 static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1027 {
1028   PetscSection      sec;
1029   PetscInt          chart[2], p;
1030   PetscInt          *dofarr;
1031   PetscMPIInt       rank;
1032 
1033   PetscFunctionBegin;
1034   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1035   if (rank == rootrank) {
1036     PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1037   }
1038   PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1039   PetscCall(PetscMalloc1(chart[1]-chart[0], &dofarr));
1040   if (rank == rootrank) {
1041     for (p = chart[0]; p < chart[1]; p++) {
1042       PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p-chart[0]]));
1043     }
1044   }
1045   PetscCallMPI(MPI_Bcast(dofarr, chart[1]-chart[0], MPIU_INT, rootrank, comm));
1046   PetscCall(PetscSectionCreate(comm, &sec));
1047   PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1048   for (p = chart[0]; p < chart[1]; p++) {
1049     PetscCall(PetscSectionSetDof(sec, p, dofarr[p-chart[0]]));
1050   }
1051   PetscCall(PetscSectionSetUp(sec));
1052   PetscCall(PetscFree(dofarr));
1053   *secout = sec;
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1058 {
1059   IS                  faces_expanded_is;
1060 
1061   PetscFunctionBegin;
1062   PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1063   PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1064   PetscCall(ISDestroy(&faces_expanded_is));
1065   PetscFunctionReturn(0);
1066 }
1067 
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions      options = NULL;
  const char        *prefix = NULL;
  const char        opt[] = "-dm_plex_interpolate_orient_interfaces";
  char              prefix_opt[512];
  PetscBool         flg, set;
  static PetscBool  wasSetTrue = PETSC_FALSE;  /* remembers that the user explicitly set the option true, so it can be restored */

  PetscFunctionBegin;
  if (dm) {
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    options = ((PetscObject)dm)->options;
  }
  /* assemble "-<prefix>dm_plex_interpolate_orient_interfaces" (skip opt's leading dash) */
  PetscCall(PetscStrcpy(prefix_opt, "-"));
  PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
  PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
  PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
  if (!enable) {
    /* force the option off, noting whether the user had turned it on explicitly */
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
    } else {
      /* default is PETSC_TRUE */
      PetscCall(PetscOptionsClearValue(options, prefix_opt));
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    /* sanity check that the option database now reflects the requested state */
    PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
    PetscCheckFalse(set && flg != enable,PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
  }
  PetscFunctionReturn(0);
}
1105 
/* get coordinate description of the whole-domain boundary */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;  /* bnd0: root-rank-local description; bnd: replicated on all ranks */
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                     *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;  /* rank holding the serial mesh data */
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscCall(PetscNew(&bnd));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* the algorithm below assumes rootrank owns all points, hence a serial DM */
  PetscCall(DMPlexIsDistributed(dm, &flg));
  PetscCheck(!flg,comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");

  /* interpolate serial DM if not yet interpolated */
  PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  } else {
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
  }

  /* mark whole-domain boundary of the serial DM */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
  PetscCall(DMAddLabel(idm, label));
  PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
  PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
  PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));

  /* translate to coordinates */
  PetscCall(PetscNew(&bnd0));
  PetscCall(DMGetCoordinatesLocalSetUp(idm));
  if (rank == rootrank) {
    /* expand boundary faces down to vertices, then read off the vertex coordinates */
    PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
    PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
    /* self-check */
    {
      IS is0;
      PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
      PetscCall(ISEqual(is0, boundary_is, &flg));
      PetscCheck(flg,PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
      PetscCall(ISDestroy(&is0));
    }
  } else {
    /* non-root ranks contribute an empty coordinates vector */
    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates));
  }

  {
    Vec         tmp;
    VecScatter  sc;
    IS          xis;
    PetscInt    n;

    /* just convert seq vectors to mpi vector */
    PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
    PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
    if (rank == rootrank) {
      PetscCall(VecCreateMPI(comm, n, n, &tmp));
    } else {
      PetscCall(VecCreateMPI(comm, 0, n, &tmp));
    }
    PetscCall(VecCopy(bnd0->coordinates, tmp));
    PetscCall(VecDestroy(&bnd0->coordinates));
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    PetscCall(VecCreateMPI(comm, n, n*size, &bnd->coordinates));
    PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
    PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
    PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(  sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&sc));
    PetscCall(ISDestroy(&xis));
  }
  /* replicate depth and the per-level sections from the root rank */
  bnd->depth = bnd0->depth;
  PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
  PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
  for (d=0; d<bnd->depth; d++) {
    PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
  }

  if (rank == rootrank) {
    PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
  }
  PetscCall(PortableBoundaryDestroy(&bnd0));
  PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
  PetscCall(DMLabelDestroy(&label));
  PetscCall(ISDestroy(&boundary_is));
  PetscCall(DMDestroy(&idm));
  *boundary = bnd;
  PetscFunctionReturn(0);
}
1211 
1212 /* get faces of inter-partition interface */
1213 static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1214 {
1215   MPI_Comm               comm;
1216   DMLabel                label;
1217   IS                     part_boundary_faces_is;
1218   const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1219   PetscInt               value = 1;
1220   DMPlexInterpolatedFlag intp;
1221 
1222   PetscFunctionBegin;
1223   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1224   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1225   PetscCheckFalse(intp != DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1226 
1227   /* get ipdm partition boundary (partBoundary) */
1228   PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1229   PetscCall(DMAddLabel(ipdm, label));
1230   PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1231   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1232   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1233   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1234   PetscCall(DMLabelDestroy(&label));
1235 
1236   /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1237   PetscCall(ISDifference(part_boundary_faces_is,boundary_faces_is,interface_faces_is));
1238   PetscCall(ISDestroy(&part_boundary_faces_is));
1239   PetscFunctionReturn(0);
1240 }
1241 
1242 /* compute inter-partition interface including edges and vertices */
1243 static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
1244 {
1245   DMLabel                label;
1246   PetscInt               value = 1;
1247   const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
1248   DMPlexInterpolatedFlag intp;
1249   MPI_Comm               comm;
1250 
1251   PetscFunctionBegin;
1252   PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1253   PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1254   PetscCheckFalse(intp != DMPLEX_INTERPOLATED_FULL,comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1255 
1256   PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
1257   PetscCall(DMAddLabel(ipdm, label));
1258   PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
1259   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
1260   PetscCall(DMPlexLabelComplete(ipdm, label));
1261   PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
1262   PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
1263   PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
1264   PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
1265   PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1266   PetscCall(DMLabelDestroy(&label));
1267   PetscFunctionReturn(0);
1268 }
1269 
1270 static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1271 {
1272   PetscInt        n;
1273   const PetscInt  *arr;
1274 
1275   PetscFunctionBegin;
1276   PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1277   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1282 {
1283   PetscInt        n;
1284   const PetscInt  *rootdegree;
1285   PetscInt        *arr;
1286 
1287   PetscFunctionBegin;
1288   PetscCall(PetscSFSetUp(sf));
1289   PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1290   PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1291   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1292   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1293   PetscFunctionReturn(0);
1294 }
1295 
1296 static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1297 {
1298   IS pointSF_out_is, pointSF_in_is;
1299 
1300   PetscFunctionBegin;
1301   PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1302   PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1303   PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1304   PetscCall(ISDestroy(&pointSF_out_is));
1305   PetscCall(ISDestroy(&pointSF_in_is));
1306   PetscFunctionReturn(0);
1307 }
1308 
/* Used while reporting a broken PointSF: if a call made during the report itself
   fails, abort with a generic "PointSF is wrong" message instead of the real code. */
#define CHKERRMY(ierr) PetscCheck(!ierr,PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
1310 
/* Recursively print every point of pointsIS with its type (vertex/edge/face/cell),
   vertex coordinates where applicable, and its whole cone closure, using
   synchronized ASCII output with increasing indentation per recursion level. */
static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar     *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt  *points;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMGetCoordinateSection(dm, &coordsSection));
  PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
  PetscCall(VecGetArray(coordsVec, &coordsScalar));
  PetscCall(ISGetLocalSize(pointsIS, &npoints));
  PetscCall(ISGetIndices(pointsIS, &points));
  PetscCall(DMPlexGetDepthLabel(dm, &label));
  PetscCall(PetscViewerASCIIPushTab(v));
  for (i=0; i<npoints; i++) {
    p = points[i];
    PetscCall(DMLabelGetValue(label, p, &depth));
    if (!depth) {
      /* depth 0 point is a vertex: print its coordinates */
      PetscInt        n, o;
      char            coordstr[128];

      PetscCall(PetscSectionGetDof(coordsSection, p, &n));
      PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
      PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %D w/ coordinates %s\n", p, coordstr));
    } else {
      char            entityType[16];

      switch (depth) {
        case 1: PetscCall(PetscStrcpy(entityType, "edge")); break;
        case 2: PetscCall(PetscStrcpy(entityType, "face")); break;
        case 3: PetscCall(PetscStrcpy(entityType, "cell")); break;
        default: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      /* in a dim-dimensional mesh, points at depth == dim are cells even when dim < 3 */
      if (depth == dim && dim < 3) {
        PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %D\n", entityType, p));
    }
    PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
    if (coneSize) {
      /* recurse into the cone to print the point's closure */
      const PetscInt *cone;
      IS             coneIS;

      PetscCall(DMPlexGetCone(dm, p, &cone));
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
      PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
      PetscCall(ISDestroy(&coneIS));
    }
  }
  PetscCall(PetscViewerASCIIPopTab(v));
  PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
  PetscCall(ISRestoreIndices(pointsIS, &points));
  PetscFunctionReturn(0);
}
1370 
1371 static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1372 {
1373   PetscBool       flg;
1374   PetscInt        npoints;
1375   PetscMPIInt     rank;
1376 
1377   PetscFunctionBegin;
1378   PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1379   PetscCheck(flg,PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1380   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1381   PetscCall(PetscViewerASCIIPushSynchronized(v));
1382   PetscCall(ISGetLocalSize(points, &npoints));
1383   if (npoints) {
1384     PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1385     PetscCall(ViewPointsWithType_Internal(dm, points, v));
1386   }
1387   PetscCall(PetscViewerFlush(v));
1388   PetscCall(PetscViewerASCIIPopSynchronized(v));
1389   PetscFunctionReturn(0);
1390 }
1391 
/* Verify that the points covered by the PointSF coincide exactly with the computed
   inter-partition interface on every rank; on mismatch, print the missing and extra
   points and raise an error. */
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF         pointsf;
  IS              pointsf_is;
  PetscBool       flg;
  MPI_Comm        comm;
  PetscMPIInt     size;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetPointSF(ipdm, &pointsf));
  if (pointsf) {
    PetscInt nroots;
    PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    /* without a PointSF there can be no interface; require interface_is empty */
    PetscInt N=0;
    if (interface_is) PetscCall(ISGetSize(interface_is, &N));
    PetscCheck(!N,comm, PETSC_ERR_PLIB, "interface_is should be NULL or empty for PointSF being NULL");
    PetscFunctionReturn(0);
  }

  /* get PointSF points as IS pointsf_is */
  PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));

  /* compare pointsf_is with interface_is */
  PetscCall(ISEqual(interface_is, pointsf_is, &flg));
  /* combine per-rank results so every rank agrees before reporting */
  PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE,&flg,1,MPIU_BOOL,MPI_LAND,comm));
  if (!flg) {
    IS pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    /* CHKERRMY: a failure while reporting still surfaces as a PointSF error */
    CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
    CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
    CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
    CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
    CHKERRMY(ISDestroy(&pointsf_extra_is));
    CHKERRMY(ISDestroy(&pointsf_missing_is));
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  PetscCall(ISDestroy(&pointsf_is));
  PetscFunctionReturn(0);
}
1438 
1439 /* remove faces & edges from label, leave just vertices */
1440 static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1441 {
1442   PetscInt        vStart, vEnd;
1443   MPI_Comm        comm;
1444 
1445   PetscFunctionBegin;
1446   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1447   PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
1448   PetscCall(ISGeneralFilter(points, vStart, vEnd));
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 /*
1453   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.
1454 
1455   Collective
1456 
1457   Input Parameters:
1458 . dm - The DMPlex object
1459 
1460   Notes:
1461   The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1462   This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).
1463   This is mainly intended for debugging/testing purposes.
1464 
1465   Algorithm:
1466   1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1467   2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1468   3. the mesh is distributed or loaded in parallel
1469   4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1470   5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1471   6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1472   7. check that interface covered by PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error
1473 
1474   Level: developer
1475 
1476 .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1477 */
1478 static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1479 {
1480   DM                     ipdm=NULL;
1481   IS                     boundary_faces_is, interface_faces_is, interface_is;
1482   DMPlexInterpolatedFlag intp;
1483   MPI_Comm               comm;
1484 
1485   PetscFunctionBegin;
1486   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1487 
1488   PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1489   if (intp == DMPLEX_INTERPOLATED_FULL) {
1490     ipdm = dm;
1491   } else {
1492     /* create temporary interpolated DM if input DM is not interpolated */
1493     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1494     PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1495     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1496   }
1497   PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1498 
1499   /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1500   PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1501   /* get inter-partition interface faces (interface_faces_is)*/
1502   PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1503   /* compute inter-partition interface including edges and vertices (interface_is) */
1504   PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1505   /* destroy immediate ISs */
1506   PetscCall(ISDestroy(&boundary_faces_is));
1507   PetscCall(ISDestroy(&interface_faces_is));
1508 
1509   /* for uninterpolated case, keep just vertices in interface */
1510   if (!intp) {
1511     PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1512     PetscCall(DMDestroy(&ipdm));
1513   }
1514 
1515   /* compare PointSF with the boundary reconstructed from coordinates */
1516   PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1517   PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1518   PetscCall(ISDestroy(&interface_is));
1519   PetscFunctionReturn(0);
1520 }
1521 
1522 int main(int argc, char **argv)
1523 {
1524   DM             dm;
1525   AppCtx         user;
1526 
1527   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1528   PetscCall(PetscLogStageRegister("create",&stage[0]));
1529   PetscCall(PetscLogStageRegister("distribute",&stage[1]));
1530   PetscCall(PetscLogStageRegister("interpolate",&stage[2]));
1531   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1532   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1533   if (user.nPointsToExpand) {
1534     PetscCall(TestExpandPoints(dm, &user));
1535   }
1536   if (user.ncoords) {
1537     Vec coords;
1538 
1539     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1540     PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1541     PetscCall(VecDestroy(&coords));
1542   }
1543   PetscCall(DMDestroy(&dm));
1544   PetscCall(PetscFinalize());
1545   return 0;
1546 }
1547 
1548 /*TEST
1549 
1550   testset:
1551     nsize: 2
1552     args: -dm_view ascii::ascii_info_detail
1553     args: -dm_plex_check_all
1554     test:
1555       suffix: 1_tri_dist0
1556       args: -distribute 0 -interpolate {{none create}separate output}
1557     test:
1558       suffix: 1_tri_dist1
1559       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1560     test:
1561       suffix: 1_quad_dist0
1562       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1563     test:
1564       suffix: 1_quad_dist1
1565       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1566     test:
1567       suffix: 1_1d_dist1
1568       args: -dim 1 -distribute 1
1569 
1570   testset:
1571     nsize: 3
1572     args: -testnum 1 -interpolate create
1573     args: -dm_plex_check_all
1574     test:
1575       suffix: 2
1576       args: -dm_view ascii::ascii_info_detail
1577     test:
1578       suffix: 2a
1579       args: -dm_plex_check_cones_conform_on_interfaces_verbose
1580     test:
1581       suffix: 2b
1582       args: -test_expand_points 0,1,2,5,6
1583     test:
1584       suffix: 2c
1585       args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1586 
1587   testset:
    # the same as testset 1 but for 3D
1589     nsize: 2
1590     args: -dim 3 -dm_view ascii::ascii_info_detail
1591     args: -dm_plex_check_all
1592     test:
1593       suffix: 4_tet_dist0
1594       args: -distribute 0 -interpolate {{none create}separate output}
1595     test:
1596       suffix: 4_tet_dist1
1597       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1598     test:
1599       suffix: 4_hex_dist0
1600       args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1601     test:
1602       suffix: 4_hex_dist1
1603       args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1604 
1605   test:
1606     # the same as 4_tet_dist0 but test different initial orientations
1607     suffix: 4_tet_test_orient
1608     nsize: 2
1609     args: -dim 3 -distribute 0
1610     args: -dm_plex_check_all
1611     args: -rotate_interface_0 {{0 1 2 11 12 13}}
1612     args: -rotate_interface_1 {{0 1 2 11 12 13}}
1613 
1614   testset:
1615     requires: exodusii
1616     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1617     args: -dm_view ascii::ascii_info_detail
1618     args: -dm_plex_check_all
1619     args: -custom_view
1620     test:
1621       suffix: 5_seq
1622       nsize: 1
1623       args: -distribute 0 -interpolate {{none create}separate output}
1624     test:
1625       # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1626       suffix: 5_dist0
1627       nsize: 2
1628       args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1629     test:
1630       suffix: 5_dist1
1631       nsize: 2
1632       args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1633 
1634   testset:
1635     nsize: {{1 2 4}}
1636     args: -use_generator
1637     args: -dm_plex_check_all
1638     args: -distribute -interpolate none
1639     test:
1640       suffix: 6_tri
1641       requires: triangle
1642       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1643     test:
1644       suffix: 6_quad
1645       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1646     test:
1647       suffix: 6_tet
1648       requires: ctetgen
1649       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1650     test:
1651       suffix: 6_hex
1652       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1653   testset:
1654     nsize: {{1 2 4}}
1655     args: -use_generator
1656     args: -dm_plex_check_all
1657     args: -distribute -interpolate create
1658     test:
1659       suffix: 6_int_tri
1660       requires: triangle
1661       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1662     test:
1663       suffix: 6_int_quad
1664       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1665     test:
1666       suffix: 6_int_tet
1667       requires: ctetgen
1668       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1669     test:
1670       suffix: 6_int_hex
1671       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1672   testset:
1673     nsize: {{2 4}}
1674     args: -use_generator
1675     args: -dm_plex_check_all
1676     args: -distribute -interpolate after_distribute
1677     test:
1678       suffix: 6_parint_tri
1679       requires: triangle
1680       args: -faces {{2,2  1,3  7,4}} -cell_simplex 1 -dm_generator triangle
1681     test:
1682       suffix: 6_parint_quad
1683       args: -faces {{2,2  1,3  7,4}} -cell_simplex 0
1684     test:
1685       suffix: 6_parint_tet
1686       requires: ctetgen
1687       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1688     test:
1689       suffix: 6_parint_hex
1690       args: -faces {{2,2,2  1,3,5  3,4,7}} -cell_simplex 0
1691 
1692   testset: # 7 EXODUS
1693     requires: exodusii
1694     args: -dm_plex_check_all
1695     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1696     args: -distribute
1697     test: # seq load, simple partitioner
1698       suffix: 7_exo
1699       nsize: {{1 2 4 5}}
1700       args: -interpolate none
1701     test: # seq load, seq interpolation, simple partitioner
1702       suffix: 7_exo_int_simple
1703       nsize: {{1 2 4 5}}
1704       args: -interpolate create
1705     test: # seq load, seq interpolation, metis partitioner
1706       suffix: 7_exo_int_metis
1707       requires: parmetis
1708       nsize: {{2 4 5}}
1709       args: -interpolate create
1710       args: -petscpartitioner_type parmetis
1711     test: # seq load, simple partitioner, par interpolation
1712       suffix: 7_exo_simple_int
1713       nsize: {{2 4 5}}
1714       args: -interpolate after_distribute
1715     test: # seq load, metis partitioner, par interpolation
1716       suffix: 7_exo_metis_int
1717       requires: parmetis
1718       nsize: {{2 4 5}}
1719       args: -interpolate after_distribute
1720       args: -petscpartitioner_type parmetis
1721 
  testset: # 7 HDF5 SEQUENTIAL LOAD
1723     requires: hdf5 !complex
1724     args: -dm_plex_check_all
1725     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1726     args: -dm_plex_hdf5_force_sequential
1727     args: -distribute
1728     test: # seq load, simple partitioner
1729       suffix: 7_seq_hdf5_simple
1730       nsize: {{1 2 4 5}}
1731       args: -interpolate none
1732     test: # seq load, seq interpolation, simple partitioner
1733       suffix: 7_seq_hdf5_int_simple
1734       nsize: {{1 2 4 5}}
1735       args: -interpolate after_create
1736     test: # seq load, seq interpolation, metis partitioner
1737       nsize: {{2 4 5}}
1738       suffix: 7_seq_hdf5_int_metis
1739       requires: parmetis
1740       args: -interpolate after_create
1741       args: -petscpartitioner_type parmetis
1742     test: # seq load, simple partitioner, par interpolation
1743       suffix: 7_seq_hdf5_simple_int
1744       nsize: {{2 4 5}}
1745       args: -interpolate after_distribute
1746     test: # seq load, metis partitioner, par interpolation
1747       nsize: {{2 4 5}}
1748       suffix: 7_seq_hdf5_metis_int
1749       requires: parmetis
1750       args: -interpolate after_distribute
1751       args: -petscpartitioner_type parmetis
1752 
1753   testset: # 7 HDF5 PARALLEL LOAD
1754     requires: hdf5 !complex
1755     nsize: {{2 4 5}}
1756     args: -dm_plex_check_all
1757     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1758     test: # par load
1759       suffix: 7_par_hdf5
1760       args: -interpolate none
1761     test: # par load, par interpolation
1762       suffix: 7_par_hdf5_int
1763       args: -interpolate after_create
1764     test: # par load, parmetis repartitioner
1765       TODO: Parallel partitioning of uninterpolated meshes not supported
1766       suffix: 7_par_hdf5_parmetis
1767       requires: parmetis
1768       args: -distribute -petscpartitioner_type parmetis
1769       args: -interpolate none
1770     test: # par load, par interpolation, parmetis repartitioner
1771       suffix: 7_par_hdf5_int_parmetis
1772       requires: parmetis
1773       args: -distribute -petscpartitioner_type parmetis
1774       args: -interpolate after_create
1775     test: # par load, parmetis partitioner, par interpolation
1776       TODO: Parallel partitioning of uninterpolated meshes not supported
1777       suffix: 7_par_hdf5_parmetis_int
1778       requires: parmetis
1779       args: -distribute -petscpartitioner_type parmetis
1780       args: -interpolate after_distribute
1781 
1782     test:
1783       suffix: 7_hdf5_hierarch
1784       requires: hdf5 ptscotch !complex
1785       nsize: {{2 3 4}separate output}
1786       args: -distribute
1787       args: -interpolate after_create
1788       args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1789       args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1790       args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1791 
1792   test:
1793     suffix: 8
1794     requires: hdf5 !complex
1795     nsize: 4
1796     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1797     args: -distribute 0 -interpolate after_create
1798     args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1799     args: -dm_plex_check_all
1800     args: -custom_view
1801 
  testset: # 9 HDF5 SEQUENTIAL LOAD
1803     requires: hdf5 !complex datafilespath
1804     args: -dm_plex_check_all
1805     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1806     args: -dm_plex_hdf5_force_sequential
1807     args: -distribute
1808     test: # seq load, simple partitioner
1809       suffix: 9_seq_hdf5_simple
1810       nsize: {{1 2 4 5}}
1811       args: -interpolate none
1812     test: # seq load, seq interpolation, simple partitioner
1813       suffix: 9_seq_hdf5_int_simple
1814       nsize: {{1 2 4 5}}
1815       args: -interpolate after_create
1816     test: # seq load, seq interpolation, metis partitioner
1817       nsize: {{2 4 5}}
1818       suffix: 9_seq_hdf5_int_metis
1819       requires: parmetis
1820       args: -interpolate after_create
1821       args: -petscpartitioner_type parmetis
1822     test: # seq load, simple partitioner, par interpolation
1823       suffix: 9_seq_hdf5_simple_int
1824       nsize: {{2 4 5}}
1825       args: -interpolate after_distribute
1826     test: # seq load, simple partitioner, par interpolation
1827       # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1828       # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1829       # We can then provide an intentionally broken mesh instead.
1830       TODO: This test is broken because PointSF is fixed.
1831       suffix: 9_seq_hdf5_simple_int_err
1832       nsize: 4
1833       args: -interpolate after_distribute
1834       filter: sed -e "/PETSC ERROR/,$$d"
1835     test: # seq load, metis partitioner, par interpolation
1836       nsize: {{2 4 5}}
1837       suffix: 9_seq_hdf5_metis_int
1838       requires: parmetis
1839       args: -interpolate after_distribute
1840       args: -petscpartitioner_type parmetis
1841 
1842   testset: # 9 HDF5 PARALLEL LOAD
1843     requires: hdf5 !complex datafilespath
1844     nsize: {{2 4 5}}
1845     args: -dm_plex_check_all
1846     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1847     test: # par load
1848       suffix: 9_par_hdf5
1849       args: -interpolate none
1850     test: # par load, par interpolation
1851       suffix: 9_par_hdf5_int
1852       args: -interpolate after_create
1853     test: # par load, parmetis repartitioner
1854       TODO: Parallel partitioning of uninterpolated meshes not supported
1855       suffix: 9_par_hdf5_parmetis
1856       requires: parmetis
1857       args: -distribute -petscpartitioner_type parmetis
1858       args: -interpolate none
1859     test: # par load, par interpolation, parmetis repartitioner
1860       suffix: 9_par_hdf5_int_parmetis
1861       requires: parmetis
1862       args: -distribute -petscpartitioner_type parmetis
1863       args: -interpolate after_create
1864     test: # par load, parmetis partitioner, par interpolation
1865       TODO: Parallel partitioning of uninterpolated meshes not supported
1866       suffix: 9_par_hdf5_parmetis_int
1867       requires: parmetis
1868       args: -distribute -petscpartitioner_type parmetis
1869       args: -interpolate after_distribute
1870 
1871   testset: # 10 HDF5 PARALLEL LOAD
1872     requires: hdf5 !complex datafilespath
1873     nsize: {{2 4 7}}
1874     args: -dm_plex_check_all
1875     args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1876     test: # par load, par interpolation
1877       suffix: 10_par_hdf5_int
1878       args: -interpolate after_create
1879 TEST*/
1880