xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 69f65dfb176f3d3e756fc44d2300fd6791726976)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency() may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
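  Example:
  A minimal sketch of a callback and its registration (MyAdjacency and appCtx are illustrative names, not part of PETSc); the callback must write at most the incoming *adjSize points into adj[] and set *adjSize to the number written:
.vb
  static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
  {
    PetscFunctionBegin;
    adj[0]   = p; // a point may be listed as adjacent to itself, as the built-in rules do
    *adjSize = 1;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(DMPlexSetAdjacencyUser(dm, MyAdjacency, &appCtx));
.ve
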
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(PETSC_SUCCESS);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(PETSC_SUCCESS);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
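      /* Linear-search insert: the comma expression in the loop condition appends support[s]
         to adj[] exactly when the scan reaches numAdj without finding a duplicate */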
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(PETSC_SUCCESS);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(PETSC_SUCCESS);
177 }
178 
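/* Compute the points adjacent to p using the requested adjacency rule; if *adj is NULL on entry,
   an array sized by a conservative cone/support bound is allocated here and returned to the caller */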
179 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
180 {
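  /* Note that asiz is static: the conservative bound computed when this routine allocates *adj is
     remembered and reused as the default *adjSize on later calls that pass *adjSize < 0 */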
181   static PetscInt asiz       = 0;
182   PetscInt        maxAnchors = 1;
183   PetscInt        aStart = -1, aEnd = -1;
184   PetscInt        maxAdjSize;
185   PetscSection    aSec = NULL;
186   IS              aIS  = NULL;
187   const PetscInt *anchors;
188   DM_Plex        *mesh = (DM_Plex *)dm->data;
189 
190   PetscFunctionBeginHot;
191   if (useAnchors) {
192     PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
193     if (aSec) {
194       PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
195       maxAnchors = PetscMax(1, maxAnchors);
196       PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
197       PetscCall(ISGetIndices(aIS, &anchors));
198     }
199   }
200   if (!*adj) {
201     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
202 
203     PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
204     PetscCall(DMPlexGetDepth(dm, &depth));
205     depth = PetscMax(depth, -depth);
206     PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
207     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
208     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
209     asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
210     asiz *= maxAnchors;
211     asiz = PetscMin(asiz, pEnd - pStart);
212     PetscCall(PetscMalloc1(asiz, adj));
213   }
214   if (*adjSize < 0) *adjSize = asiz;
215   maxAdjSize = *adjSize;
216   if (mesh->useradjacency) {
217     PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
218   } else if (useTransitiveClosure) {
219     PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
220   } else if (useCone) {
221     PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
222   } else {
223     PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
224   }
225   if (useAnchors && aSec) {
226     PetscInt  origSize = *adjSize;
227     PetscInt  numAdj   = origSize;
228     PetscInt  i        = 0, j;
229     PetscInt *orig     = *adj;
230 
231     while (i < origSize) {
232       PetscInt p    = orig[i];
233       PetscInt aDof = 0;
234 
235       if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
236       if (aDof) {
237         PetscInt aOff;
238         PetscInt s, q;
239 
240         for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
241         origSize--;
242         numAdj--;
243         PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
244         for (s = 0; s < aDof; ++s) {
245           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
246             if (anchors[aOff + s] == orig[q]) break;
247           }
248           PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
249         }
250       } else {
251         i++;
252       }
253     }
254     *adjSize = numAdj;
255     PetscCall(ISRestoreIndices(aIS, &anchors));
256   }
257   PetscFunctionReturn(PETSC_SUCCESS);
258 }
259 
260 /*@
261   DMPlexGetAdjacency - Return all points adjacent to the given point
262 
263   Input Parameters:
264 + dm - The DM object
265 - p  - The point
266 
267   Input/Output Parameters:
268 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
269             on output the number of adjacent points
270 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
271         on output contains the adjacent points
272 
273   Level: advanced
274 
275   Notes:
276     The user must PetscFree the adj array if it was not passed in.
277 
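  Example:
  Passing NULL for adj and PETSC_DETERMINE for adjSize lets this routine allocate the array (a usage sketch):
.vb
  PetscInt *adj = NULL, adjSize = PETSC_DETERMINE;

  PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
  for (PetscInt a = 0; a < adjSize; ++a) {
    // use adj[a]
  }
  PetscCall(PetscFree(adj));
.ve
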
278 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
279 @*/
280 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
281 {
282   PetscBool useCone, useClosure, useAnchors;
283 
284   PetscFunctionBeginHot;
285   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
286   PetscValidIntPointer(adjSize, 3);
287   PetscValidPointer(adj, 4);
288   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
289   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
290   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
291   PetscFunctionReturn(PETSC_SUCCESS);
292 }
293 
294 /*@
295   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
296 
297   Collective on dm
298 
299   Input Parameters:
300 + dm      - The DM
301 . sfPoint - The PetscSF which encodes point connectivity
302 . rootRankSection - The number of leaves for a given root point
303 . rootRanks       - The rank of each edge into the root point
304 . leafRankSection - The number of processes sharing a given leaf point
305 - leafRanks       - The rank of each process sharing a leaf point
306 
307   Output Parameters:
308 + processRanks - A list of process neighbors, or NULL
309 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
310 
311   Level: developer
312 
313 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
314 @*/
315 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
316 {
317   const PetscSFNode *remotePoints;
318   PetscInt          *localPointsNew;
319   PetscSFNode       *remotePointsNew;
320   const PetscInt    *nranks;
321   PetscInt          *ranksNew;
322   PetscBT            neighbors;
323   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
324   PetscMPIInt        size, proc, rank;
325 
326   PetscFunctionBegin;
327   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
328   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
329   if (processRanks) PetscValidPointer(processRanks, 7);
330   if (sfProcess) PetscValidPointer(sfProcess, 8);
331   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
332   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
333   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
334   PetscCall(PetscBTCreate(size, &neighbors));
335   PetscCall(PetscBTMemzero(size, neighbors));
336   /* Compute root-to-leaf process connectivity */
337   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
338   PetscCall(ISGetIndices(rootRanks, &nranks));
339   for (p = pStart; p < pEnd; ++p) {
340     PetscInt ndof, noff, n;
341 
342     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
343     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
344     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
345   }
346   PetscCall(ISRestoreIndices(rootRanks, &nranks));
347   /* Compute leaf-to-neighbor process connectivity */
348   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
349   PetscCall(ISGetIndices(leafRanks, &nranks));
350   for (p = pStart; p < pEnd; ++p) {
351     PetscInt ndof, noff, n;
352 
353     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
354     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
355     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
356   }
357   PetscCall(ISRestoreIndices(leafRanks, &nranks));
358   /* Compute leaf-to-root process connectivity */
359   for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
360   /* Calculate edges */
361   PetscCall(PetscBTClear(neighbors, rank));
362   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
363     if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
364   }
365   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
366   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
367   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
368   for (proc = 0, n = 0; proc < size; ++proc) {
369     if (PetscBTLookup(neighbors, proc)) {
370       ranksNew[n]              = proc;
371       localPointsNew[n]        = proc;
372       remotePointsNew[n].index = rank;
373       remotePointsNew[n].rank  = proc;
374       ++n;
375     }
376   }
377   PetscCall(PetscBTDestroy(&neighbors));
378   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
379   else PetscCall(PetscFree(ranksNew));
380   if (sfProcess) {
381     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
382     PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
383     PetscCall(PetscSFSetFromOptions(*sfProcess));
384     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
385   }
386   PetscFunctionReturn(PETSC_SUCCESS);
387 }
388 
389 /*@
390   DMPlexDistributeOwnership - Compute owner information for shared points. This effectively creates two-sided information from the one-sided point SF.
391 
392   Collective on dm
393 
394   Input Parameter:
395 . dm - The DM
396 
397   Output Parameters:
398 + rootSection - The number of leaves for a given root point
399 . rootrank    - The rank of each edge into the root point
400 . leafSection - The number of processes sharing a given leaf point
401 - leafrank    - The rank of each process sharing a leaf point
402 
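  Example:
  The typical pattern when building an overlap (a sketch; cleanup of the sections and ISes is elided):
.vb
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DMLabel      ovLabel;

  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &rootSection));
  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  PetscCall(DMPlexCreateOverlapLabel(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel));
.ve
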
403   Level: developer
404 
405 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
406 @*/
407 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
408 {
409   MPI_Comm        comm;
410   PetscSF         sfPoint;
411   const PetscInt *rootdegree;
412   PetscInt       *myrank, *remoterank;
413   PetscInt        pStart, pEnd, p, nedges;
414   PetscMPIInt     rank;
415 
416   PetscFunctionBegin;
417   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
418   PetscCallMPI(MPI_Comm_rank(comm, &rank));
419   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
420   PetscCall(DMGetPointSF(dm, &sfPoint));
421   /* Compute number of leaves for each root */
422   PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
423   PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
424   PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
425   PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
426   for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
427   PetscCall(PetscSectionSetUp(rootSection));
428   /* Gather rank of each leaf to root */
429   PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
430   PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
431   PetscCall(PetscMalloc1(nedges, &remoterank));
432   for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
433   PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
434   PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
435   PetscCall(PetscFree(myrank));
436   PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
437   /* Distribute remote ranks to leaves */
438   PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
439   PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
440   PetscFunctionReturn(PETSC_SUCCESS);
441 }
442 
443 /*@C
444   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
445 
446   Collective on dm
447 
448   Input Parameters:
449 + dm          - The DM
450 . levels      - Number of overlap levels
451 . rootSection - The number of leaves for a given root point
452 . rootrank    - The rank of each edge into the root point
453 . leafSection - The number of processes sharing a given leaf point
454 - leafrank    - The rank of each process sharing a leaf point
455 
456   Output Parameter:
457 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
458 
459   Level: developer
460 
461 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
462 @*/
463 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
464 {
465   MPI_Comm           comm;
466   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
467   PetscSF            sfPoint;
468   const PetscSFNode *remote;
469   const PetscInt    *local;
470   const PetscInt    *nrank, *rrank;
471   PetscInt          *adj = NULL;
472   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
473   PetscMPIInt        rank, size;
474   PetscBool          flg;
475 
476   PetscFunctionBegin;
477   *ovLabel = NULL;
478   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
479   PetscCallMPI(MPI_Comm_size(comm, &size));
480   PetscCallMPI(MPI_Comm_rank(comm, &rank));
481   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
482   PetscCall(DMGetPointSF(dm, &sfPoint));
483   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
484   if (!levels) {
485     /* Add owned points */
486     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
487     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
488     PetscFunctionReturn(PETSC_SUCCESS);
489   }
490   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
491   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
492   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
493   /* Handle leaves: shared with the root point */
494   for (l = 0; l < nleaves; ++l) {
495     PetscInt adjSize = PETSC_DETERMINE, a;
496 
497     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
498     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
499   }
500   PetscCall(ISGetIndices(rootrank, &rrank));
501   PetscCall(ISGetIndices(leafrank, &nrank));
502   /* Handle roots */
503   for (p = pStart; p < pEnd; ++p) {
504     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
505 
506     if ((p >= sStart) && (p < sEnd)) {
507       /* Some leaves share a root with other leaves on different processes */
508       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
509       if (neighbors) {
510         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
511         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
512         for (n = 0; n < neighbors; ++n) {
513           const PetscInt remoteRank = nrank[noff + n];
514 
515           if (remoteRank == rank) continue;
516           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
517         }
518       }
519     }
520     /* Roots are shared with leaves */
521     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
522     if (!neighbors) continue;
523     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
524     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
525     for (n = 0; n < neighbors; ++n) {
526       const PetscInt remoteRank = rrank[noff + n];
527 
528       if (remoteRank == rank) continue;
529       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
530     }
531   }
532   PetscCall(PetscFree(adj));
533   PetscCall(ISRestoreIndices(rootrank, &rrank));
534   PetscCall(ISRestoreIndices(leafrank, &nrank));
535   /* Add additional overlap levels */
536   for (l = 1; l < levels; l++) {
537     /* Propagate point donations over SF to capture remote connections */
538     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
539     /* Add next level of point donations to the label */
540     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
541   }
542   /* We require the closure in the overlap */
543   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
544   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
545   if (flg) {
546     PetscViewer viewer;
547     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
548     PetscCall(DMLabelView(ovAdjByRank, viewer));
549   }
550   /* Invert sender to receiver label */
551   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
552   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
553   /* Add owned points, except for shared local points */
554   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
555   for (l = 0; l < nleaves; ++l) {
556     PetscCall(DMLabelClearValue(*ovLabel, local ? local[l] : l, rank));
557     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
558   }
559   /* Clean up */
560   PetscCall(DMLabelDestroy(&ovAdjByRank));
561   PetscFunctionReturn(PETSC_SUCCESS);
562 }
563 
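/* Add to ovAdjByRank every point adjacent to p, keyed by each remote rank recorded for p in
   (section, ranks), skipping candidate points matched by any (exLabel[el], exValue[el]) pair */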
564 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
565 {
566   PetscInt neighbors, el;
567 
568   PetscFunctionBegin;
569   PetscCall(PetscSectionGetDof(section, p, &neighbors));
570   if (neighbors) {
571     PetscInt   *adj     = NULL;
572     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
573     PetscMPIInt rank;
574 
575     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
576     PetscCall(PetscSectionGetOffset(section, p, &noff));
577     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
578     for (n = 0; n < neighbors; ++n) {
579       const PetscInt remoteRank = ranks[noff + n];
580 
581       if (remoteRank == rank) continue;
582       for (a = 0; a < adjSize; ++a) {
583         PetscBool insert = PETSC_TRUE;
584 
585         for (el = 0; el < numExLabels; ++el) {
586           PetscInt exVal;
587           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
588           if (exVal == exValue[el]) {
589             insert = PETSC_FALSE;
590             break;
591           }
592         }
593         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
594       }
595     }
596     PetscCall(PetscFree(adj));
597   }
598   PetscFunctionReturn(PETSC_SUCCESS);
599 }
600 
601 /*@C
602   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
603 
604   Collective on dm
605 
606   Input Parameters:
607 + dm          - The DM
608 . numLabels   - The number of labels to draw candidate points from
609 . label       - An array of labels containing candidate points
610 . value       - An array of label values marking the candidate points
611 . numExLabels - The number of labels to use for exclusion
612 . exLabel     - An array of labels indicating points to be excluded, or NULL
613 . exValue     - An array of label values to be excluded, or NULL
614 . rootSection - The number of leaves for a given root point
615 . rootrank    - The rank of each edge into the root point
616 . leafSection - The number of processes sharing a given leaf point
617 - leafrank    - The rank of each process sharing a leaf point
618 
619   Output Parameter:
620 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
621 
622   Note:
623   The candidate points are only added to the overlap if they are adjacent to a shared point
624 
625   Level: developer
626 
627 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
628 @*/
629 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
630 {
631   MPI_Comm           comm;
632   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
633   PetscSF            sfPoint;
634   const PetscSFNode *remote;
635   const PetscInt    *local;
636   const PetscInt    *nrank, *rrank;
637   PetscInt          *adj = NULL;
638   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
639   PetscMPIInt        rank, size;
640   PetscBool          flg;
641 
642   PetscFunctionBegin;
643   *ovLabel = NULL;
644   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
645   PetscCallMPI(MPI_Comm_size(comm, &size));
646   PetscCallMPI(MPI_Comm_rank(comm, &rank));
647   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
648   PetscCall(DMGetPointSF(dm, &sfPoint));
649   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
650   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
651   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
652   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
653   PetscCall(ISGetIndices(rootrank, &rrank));
654   PetscCall(ISGetIndices(leafrank, &nrank));
655   for (l = 0; l < numLabels; ++l) {
656     IS              valIS;
657     const PetscInt *points;
658     PetscInt        n;
659 
660     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
661     if (!valIS) continue;
662     PetscCall(ISGetIndices(valIS, &points));
663     PetscCall(ISGetLocalSize(valIS, &n));
664     for (PetscInt i = 0; i < n; ++i) {
665       const PetscInt p = points[i];
666 
667       if ((p >= sStart) && (p < sEnd)) {
668         PetscInt loc, adjSize = PETSC_DETERMINE;
669 
670         /* Handle leaves: shared with the root point */
671         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
672         else loc = (p >= 0 && p < nleaves) ? p : -1;
673         if (loc >= 0) {
674           const PetscInt remoteRank = remote[loc].rank;
675 
676           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
677           for (PetscInt a = 0; a < adjSize; ++a) {
678             PetscBool insert = PETSC_TRUE;
679 
680             for (el = 0; el < numExLabels; ++el) {
681               PetscInt exVal;
682               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
683               if (exVal == exValue[el]) {
684                 insert = PETSC_FALSE;
685                 break;
686               }
687             }
688             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
689           }
690         }
691         /* Some leaves share a root with other leaves on different processes */
692         PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
693       }
694       /* Roots are shared with leaves */
695       PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
696     }
697     PetscCall(ISRestoreIndices(valIS, &points));
698     PetscCall(ISDestroy(&valIS));
699   }
700   PetscCall(PetscFree(adj));
701   PetscCall(ISRestoreIndices(rootrank, &rrank));
702   PetscCall(ISRestoreIndices(leafrank, &nrank));
703   /* We require the closure in the overlap */
704   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
705   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
706   if (flg) {
707     PetscViewer viewer;
708     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
709     PetscCall(DMLabelView(ovAdjByRank, viewer));
710   }
711   /* Invert sender to receiver label */
712   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
713   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
714   /* Add owned points, except for shared local points */
715   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
716   for (l = 0; l < nleaves; ++l) {
717     PetscCall(DMLabelClearValue(*ovLabel, local ? local[l] : l, rank));
718     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
719   }
720   /* Clean up */
721   PetscCall(DMLabelDestroy(&ovAdjByRank));
722   PetscFunctionReturn(PETSC_SUCCESS);
723 }
724 
725 /*@C
726   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
727 
728   Collective on dm
729 
730   Input Parameters:
731 + dm          - The DM
732 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
733 
734   Output Parameter:
735 . migrationSF - An SF that maps original points in old locations to points in new locations
736 
737   Level: developer
738 
739 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
740 @*/
741 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
742 {
743   MPI_Comm           comm;
744   PetscMPIInt        rank, size;
745   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
746   PetscInt          *pointDepths, *remoteDepths, *ilocal;
747   PetscInt          *depthRecv, *depthShift, *depthIdx;
748   PetscSFNode       *iremote;
749   PetscSF            pointSF;
750   const PetscInt    *sharedLocal;
751   const PetscSFNode *overlapRemote, *sharedRemote;
752 
753   PetscFunctionBegin;
754   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
755   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
756   PetscCallMPI(MPI_Comm_rank(comm, &rank));
757   PetscCallMPI(MPI_Comm_size(comm, &size));
758   PetscCall(DMGetDimension(dm, &dim));
759 
760   /* Before building the migration SF we need to know the new stratum offsets */
761   PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
762   PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
763   for (d = 0; d < dim + 1; d++) {
764     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
765     for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
766   }
767   for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
768   PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
769   PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
770 
771   /* Count received points in each stratum and compute the internal strata shift */
772   PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
773   for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
774   for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
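  /* Received points are appended stratum by stratum in the order cells, vertices, faces, edges,
     the same convention spelled out in DMPlexStratifyMigrationSF() below */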
775   depthShift[dim] = 0;
776   for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
777   for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
778   for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
779   for (d = 0; d < dim + 1; d++) {
780     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
781     depthIdx[d] = pStart + depthShift[d];
782   }
783 
784   /* Form the overlap SF: build an SF that describes the full overlap migration */
785   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
786   newLeaves = pEnd - pStart + nleaves;
787   PetscCall(PetscMalloc1(newLeaves, &ilocal));
788   PetscCall(PetscMalloc1(newLeaves, &iremote));
789   /* First map local points to themselves */
790   for (d = 0; d < dim + 1; d++) {
791     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
792     for (p = pStart; p < pEnd; p++) {
793       point                = p + depthShift[d];
794       ilocal[point]        = point;
795       iremote[point].index = p;
796       iremote[point].rank  = rank;
797       depthIdx[d]++;
798     }
799   }
800 
801   /* Add in the remote roots for currently shared points */
802   PetscCall(DMGetPointSF(dm, &pointSF));
803   PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
804   for (d = 0; d < dim + 1; d++) {
805     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
806     for (p = 0; p < numSharedPoints; p++) {
807       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
808         point                = sharedLocal[p] + depthShift[d];
809         iremote[point].index = sharedRemote[p].index;
810         iremote[point].rank  = sharedRemote[p].rank;
811       }
812     }
813   }
814 
815   /* Now add the incoming overlap points */
816   for (p = 0; p < nleaves; p++) {
817     point                = depthIdx[remoteDepths[p]];
818     ilocal[point]        = point;
819     iremote[point].index = overlapRemote[p].index;
820     iremote[point].rank  = overlapRemote[p].rank;
821     depthIdx[remoteDepths[p]]++;
822   }
823   PetscCall(PetscFree2(pointDepths, remoteDepths));
824 
825   PetscCall(PetscSFCreate(comm, migrationSF));
826   PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
827   PetscCall(PetscSFSetFromOptions(*migrationSF));
828   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
829   PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
830 
831   PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
832   PetscFunctionReturn(PETSC_SUCCESS);
833 }
834 
835 /*@
836   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
837 
838   Input Parameters:
839 + dm          - The DM
840 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
841 
842   Output Parameter:
843 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
844 
845   Note:
846   This lexicographically sorts by (depth, cellType)
847 
848   Level: developer
849 
850 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
851 @*/
852 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
853 {
854   MPI_Comm           comm;
855   PetscMPIInt        rank, size;
856   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
857   PetscSFNode       *pointDepths, *remoteDepths;
858   PetscInt          *ilocal;
859   PetscInt          *depthRecv, *depthShift, *depthIdx;
860   PetscInt          *ctRecv, *ctShift, *ctIdx;
861   const PetscSFNode *iremote;
862 
863   PetscFunctionBegin;
864   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
865   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
866   PetscCallMPI(MPI_Comm_rank(comm, &rank));
867   PetscCallMPI(MPI_Comm_size(comm, &size));
868   PetscCall(DMPlexGetDepth(dm, &ldepth));
869   PetscCall(DMGetDimension(dm, &dim));
870   PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
871   PetscCheck(ldepth < 0 || depth == ldepth, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
872   PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));
873 
874   /* Before building the migration SF we need to know the new stratum offsets */
875   PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
876   PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
877   for (d = 0; d < depth + 1; ++d) {
878     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
879     for (p = pStart; p < pEnd; ++p) {
880       DMPolytopeType ct;
881 
882       PetscCall(DMPlexGetCellType(dm, p, &ct));
883       pointDepths[p].index = d;
884       pointDepths[p].rank  = ct;
885     }
886   }
887   for (p = 0; p < nleaves; ++p) {
888     remoteDepths[p].index = -1;
889     remoteDepths[p].rank  = -1;
890   }
891   PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
892   PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
893   /* Count received points in each stratum and compute the internal strata shift */
894   PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
895   for (p = 0; p < nleaves; ++p) {
896     if (remoteDepths[p].rank < 0) {
897       ++depthRecv[remoteDepths[p].index];
898     } else {
899       ++ctRecv[remoteDepths[p].rank];
900     }
901   }
902   {
903     PetscInt depths[4], dims[4], shift = 0, i, c;
904 
905     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
906          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
907      */
908     depths[0] = depth;
909     depths[1] = 0;
910     depths[2] = depth - 1;
911     depths[3] = 1;
912     dims[0]   = dim;
913     dims[1]   = 0;
914     dims[2]   = dim - 1;
915     dims[3]   = 1;
916     for (i = 0; i <= depth; ++i) {
917       const PetscInt dep = depths[i];
918       const PetscInt dim = dims[i];
919 
920       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
921         if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
922         ctShift[c] = shift;
923         shift += ctRecv[c];
924       }
925       depthShift[dep] = shift;
926       shift += depthRecv[dep];
927     }
928     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
929       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);
930 
931       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
932         ctShift[c] = shift;
933         shift += ctRecv[c];
934       }
935     }
936   }
937   /* Derive a new local permutation based on stratified indices */
938   PetscCall(PetscMalloc1(nleaves, &ilocal));
939   for (p = 0; p < nleaves; ++p) {
940     const PetscInt       dep = remoteDepths[p].index;
941     const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;
942 
943     if ((PetscInt)ct < 0) {
944       ilocal[p] = depthShift[dep] + depthIdx[dep];
945       ++depthIdx[dep];
946     } else {
947       ilocal[p] = ctShift[ct] + ctIdx[ct];
948       ++ctIdx[ct];
949     }
950   }
951   PetscCall(PetscSFCreate(comm, migrationSF));
952   PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
953   PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
954   PetscCall(PetscFree2(pointDepths, remoteDepths));
955   PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
956   PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
957   PetscFunctionReturn(PETSC_SUCCESS);
958 }
959 
960 /*@
961   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
962 
963   Collective on dm
964 
965   Input Parameters:
966 + dm - The DMPlex object
967 . pointSF - The PetscSF describing the communication pattern
968 . originalSection - The PetscSection for existing data layout
969 - originalVec - The existing data in a local vector
970 
971   Output Parameters:
972 + newSection - The PetscSection describing the new data layout
973 - newVec - The new data in a local vector
974 
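  Example:
  The caller creates, but does not size, the output vector; this routine sets its size and type (a sketch; origSection, origVec, and newSection stand for existing objects):
.vb
  Vec newVec;

  PetscCall(VecCreate(PETSC_COMM_SELF, &newVec));
  PetscCall(DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec));
.ve
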
975   Level: developer
976 
977 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
978 @*/
979 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
980 {
981   PetscSF      fieldSF;
982   PetscInt    *remoteOffsets, fieldSize;
983   PetscScalar *originalValues, *newValues;
984 
985   PetscFunctionBegin;
986   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
987   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
988 
989   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
990   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
991   PetscCall(VecSetType(newVec, dm->vectype));
992 
993   PetscCall(VecGetArray(originalVec, &originalValues));
994   PetscCall(VecGetArray(newVec, &newValues));
995   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
996   PetscCall(PetscFree(remoteOffsets));
997   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
998   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
999   PetscCall(PetscSFDestroy(&fieldSF));
1000   PetscCall(VecRestoreArray(newVec, &newValues));
1001   PetscCall(VecRestoreArray(originalVec, &originalValues));
1002   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1003   PetscFunctionReturn(PETSC_SUCCESS);
1004 }
1005 
1006 /*@
1007   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1008 
1009   Collective on dm
1010 
1011   Input Parameters:
1012 + dm - The DMPlex object
1013 . pointSF - The PetscSF describing the communication pattern
1014 . originalSection - The PetscSection for existing data layout
1015 - originalIS - The existing data
1016 
1017   Output Parameters:
1018 + newSection - The PetscSection describing the new data layout
1019 - newIS - The new data
1020 
1021   Level: developer
1022 
1023 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1024 @*/
1025 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1026 {
1027   PetscSF         fieldSF;
1028   PetscInt       *newValues, *remoteOffsets, fieldSize;
1029   const PetscInt *originalValues;
1030 
1031   PetscFunctionBegin;
1032   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1033   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1034 
1035   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1036   PetscCall(PetscMalloc1(fieldSize, &newValues));
1037 
1038   PetscCall(ISGetIndices(originalIS, &originalValues));
1039   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1040   PetscCall(PetscFree(remoteOffsets));
1041   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1042   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1043   PetscCall(PetscSFDestroy(&fieldSF));
1044   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1045   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1046   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1047   PetscFunctionReturn(PETSC_SUCCESS);
1048 }
1049 
1050 /*@
1051   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1052 
1053   Collective on dm
1054 
1055   Input Parameters:
1056 + dm - The DMPlex object
1057 . pointSF - The PetscSF describing the communication pattern
1058 . originalSection - The PetscSection for existing data layout
1059 . datatype - The type of data
1060 - originalData - The existing data
1061 
1062   Output Parameters:
1063 + newSection - The PetscSection describing the new data layout
1064 - newData - The new data
1065 
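  Example:
  A sketch distributing an integer array attached to mesh points (origSection and origVals stand for existing objects); newData is allocated by this routine, so the caller frees it:
.vb
  PetscInt *newVals;

  PetscCall(DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, origVals, newSection, (void **)&newVals));
  // ... use newVals ...
  PetscCall(PetscFree(newVals));
.ve
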
1066   Level: developer
1067 
1068 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1069 @*/
1070 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1071 {
1072   PetscSF     fieldSF;
1073   PetscInt   *remoteOffsets, fieldSize;
1074   PetscMPIInt dataSize;
1075 
1076   PetscFunctionBegin;
1077   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1078   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1079 
1080   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1081   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1082   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1083 
1084   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1085   PetscCall(PetscFree(remoteOffsets));
1086   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1087   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1088   PetscCall(PetscSFDestroy(&fieldSF));
1089   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1090   PetscFunctionReturn(PETSC_SUCCESS);
1091 }
1092 
1093 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1094 {
1095   DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
1096   MPI_Comm     comm;
1097   PetscSF      coneSF;
1098   PetscSection originalConeSection, newConeSection;
1099   PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1100   PetscBool    flg;
1101 
1102   PetscFunctionBegin;
1103   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1104   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1105   PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
1106   /* Distribute cone section */
1107   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1108   PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
1109   PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
1110   PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
1111   PetscCall(DMSetUp(dmParallel));
1112   /* Communicate and renumber cones */
1113   PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
1114   PetscCall(PetscFree(remoteOffsets));
1115   PetscCall(DMPlexGetCones(dm, &cones));
1116   if (original) {
1117     PetscInt numCones;
1118 
1119     PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
1120     PetscCall(PetscMalloc1(numCones, &globCones));
1121     PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
1122   } else {
1123     globCones = cones;
1124   }
1125   PetscCall(DMPlexGetCones(dmParallel, &newCones));
1126   PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
1127   PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
1128   if (original) PetscCall(PetscFree(globCones));
1129   PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
1130   PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
1131   if (PetscDefined(USE_DEBUG)) {
1132     PetscInt  p;
1133     PetscBool valid = PETSC_TRUE;
1134     for (p = 0; p < newConesSize; ++p) {
1135       if (newCones[p] < 0) {
1136         valid = PETSC_FALSE;
1137         PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
1138       }
1139     }
1140     PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1141   }
1142   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
1143   if (flg) {
1144     PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
1145     PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
1146     PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
1147     PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
1148     PetscCall(PetscSFView(coneSF, NULL));
1149   }
1150   PetscCall(DMPlexGetConeOrientations(dm, &cones));
1151   PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
1152   PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
1153   PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
1154   PetscCall(PetscSFDestroy(&coneSF));
1155   PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
1156   /* Create supports and stratify DMPlex */
1157   {
1158     PetscInt pStart, pEnd;
1159 
1160     PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
1161     PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
1162   }
1163   PetscCall(DMPlexSymmetrize(dmParallel));
1164   PetscCall(DMPlexStratify(dmParallel));
1165   {
1166     PetscBool useCone, useClosure, useAnchors;
1167 
1168     PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
1169     PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
1170     PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
1171     PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
1172   }
1173   PetscFunctionReturn(PETSC_SUCCESS);
1174 }
1175 
1176 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1177 {
1178   MPI_Comm         comm;
1179   DM               cdm, cdmParallel;
1180   PetscSection     originalCoordSection, newCoordSection;
1181   Vec              originalCoordinates, newCoordinates;
1182   PetscInt         bs;
1183   const char      *name;
1184   const PetscReal *maxCell, *Lstart, *L;
1185 
1186   PetscFunctionBegin;
1187   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1188   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1189 
1190   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1191   PetscCall(DMGetCoordinateDM(dm, &cdm));
1192   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1193   PetscCall(DMCopyDisc(cdm, cdmParallel));
1194   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1195   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1196   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1197   if (originalCoordinates) {
1198     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1199     PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1200     PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1201 
1202     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1203     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1204     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1205     PetscCall(VecSetBlockSize(newCoordinates, bs));
1206     PetscCall(VecDestroy(&newCoordinates));
1207   }
1208 
1209   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1210   PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1211   PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1212   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1213   if (cdm) {
1214     PetscCall(DMClone(dmParallel, &cdmParallel));
1215     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1216     PetscCall(DMCopyDisc(cdm, cdmParallel));
1217     PetscCall(DMDestroy(&cdmParallel));
1218     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1219     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1220     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1221     if (originalCoordinates) {
1222       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1223       PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1224       PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1225 
1226       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1227       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1228       PetscCall(VecSetBlockSize(newCoordinates, bs));
1229       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1230       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1231       PetscCall(VecDestroy(&newCoordinates));
1232     }
1233     PetscCall(PetscSectionDestroy(&newCoordSection));
1234   }
1235   PetscFunctionReturn(PETSC_SUCCESS);
1236 }
1237 
1238 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1239 {
1240   DM_Plex         *mesh = (DM_Plex *)dm->data;
1241   MPI_Comm         comm;
1242   DMLabel          depthLabel;
1243   PetscMPIInt      rank;
1244   PetscInt         depth, d, numLabels, numLocalLabels, l;
1245   PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
1246   PetscObjectState depthState = -1;
1247 
1248   PetscFunctionBegin;
1249   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1250   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1251 
1252   PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1253   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1254   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1255 
1256   /* If the user has changed the depth label, communicate it instead */
1257   PetscCall(DMPlexGetDepth(dm, &depth));
1258   PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
1259   if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
1260   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1261   PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
1262   if (sendDepth) {
1263     PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
1264     PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
1265   }
1266   /* Every rank must have either the same number of labels as rank 0, or none */
1267   PetscCall(DMGetNumLabels(dm, &numLocalLabels));
1268   numLabels = numLocalLabels;
1269   PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
1270   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1271   for (l = 0; l < numLabels; ++l) {
1272     DMLabel     label = NULL, labelNew = NULL;
1273     PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
1274     const char *name = NULL;
1275 
1276     if (hasLabels) {
1277       PetscCall(DMGetLabelByNum(dm, l, &label));
1278       /* Skip "depth" because it is recreated */
1279       PetscCall(PetscObjectGetName((PetscObject)label, &name));
1280       PetscCall(PetscStrcmp(name, "depth", &isDepth));
1281     } else {
1282       isDepth = PETSC_FALSE;
1283     }
1284     PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
1285     if (isDepth && !sendDepth) continue;
1286     PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
1287     if (isDepth) {
1288       /* Put in any missing strata, which can occur if users are managing the depth label themselves */
1289       PetscInt gdepth;
1290 
1291       PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
1292       PetscCheck(depth < 0 || gdepth == depth, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
1293       for (d = 0; d <= gdepth; ++d) {
1294         PetscBool has;
1295 
1296         PetscCall(DMLabelHasStratum(labelNew, d, &has));
1297         if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
1298       }
1299     }
1300     PetscCall(DMAddLabel(dmParallel, labelNew));
1301     /* Put the output flag in the new label */
1302     if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
1303     PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
1304     PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
1305     PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
1306     PetscCall(DMLabelDestroy(&labelNew));
1307   }
1308   {
1309     DMLabel ctLabel;
1310 
1311     // Reset label for fast lookup
1312     PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
1313     PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
1314   }
1315   PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1316   PetscFunctionReturn(PETSC_SUCCESS);
1317 }
1318 
1319 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1320 {
1321   DM_Plex     *mesh  = (DM_Plex *)dm->data;
1322   DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
1323   MPI_Comm     comm;
1324   DM           refTree;
1325   PetscSection origParentSection, newParentSection;
1326   PetscInt    *origParents, *origChildIDs;
1327   PetscBool    flg;
1328 
1329   PetscFunctionBegin;
1330   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1331   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1332   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1333 
1334   /* Set up tree */
1335   PetscCall(DMPlexGetReferenceTree(dm, &refTree));
1336   PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
1337   PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
1338   if (origParentSection) {
1339     PetscInt  pStart, pEnd;
1340     PetscInt *newParents, *newChildIDs, *globParents;
1341     PetscInt *remoteOffsetsParents, newParentSize;
1342     PetscSF   parentSF;
1343 
1344     PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
1345     PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
1346     PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
1347     PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
1348     PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
1349     PetscCall(PetscFree(remoteOffsetsParents));
1350     PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
1351     PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
1352     if (original) {
1353       PetscInt numParents;
1354 
1355       PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
1356       PetscCall(PetscMalloc1(numParents, &globParents));
1357       PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
1358     } else {
1359       globParents = origParents;
1360     }
1361     PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1362     PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1363     if (original) PetscCall(PetscFree(globParents));
1364     PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1365     PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1366     PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
1367     if (PetscDefined(USE_DEBUG)) {
1368       PetscInt  p;
1369       PetscBool valid = PETSC_TRUE;
1370       for (p = 0; p < newParentSize; ++p) {
1371         if (newParents[p] < 0) {
1372           valid = PETSC_FALSE;
1373           PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
1374         }
1375       }
1376       PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1377     }
1378     PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
1379     if (flg) {
1380       PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
1381       PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
1382       PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
1383       PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
1384       PetscCall(PetscSFView(parentSF, NULL));
1385     }
1386     PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
1387     PetscCall(PetscSectionDestroy(&newParentSection));
1388     PetscCall(PetscFree2(newParents, newChildIDs));
1389     PetscCall(PetscSFDestroy(&parentSF));
1390   }
1391   pmesh->useAnchors = mesh->useAnchors;
1392   PetscFunctionReturn(PETSC_SUCCESS);
1393 }
1394 
1395 /*@
1396   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1397 
1398   Input Parameters:
1399 + dm - The DMPlex object
1400 - flg - Balance the partition?
1401 
1402   Level: intermediate
1403 
1404 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1405 @*/
1406 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1407 {
1408   DM_Plex *mesh = (DM_Plex *)dm->data;
1409 
1410   PetscFunctionBegin;
1411   mesh->partitionBalance = flg;
1412   PetscFunctionReturn(PETSC_SUCCESS);
1413 }
1414 
1415 /*@
1416   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1417 
1418   Input Parameter:
1419 . dm - The DMPlex object
1420 
1421   Output Parameter:
1422 . flg - Balance the partition?
1423 
1424   Level: intermediate
1425 
1426 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1427 @*/
1428 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1429 {
1430   DM_Plex *mesh = (DM_Plex *)dm->data;
1431 
1432   PetscFunctionBegin;
1433   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1434   PetscValidBoolPointer(flg, 2);
1435   *flg = mesh->partitionBalance;
1436   PetscFunctionReturn(PETSC_SUCCESS);
1437 }
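
/*
  A minimal usage sketch (not part of this file), assuming a DMPlex 'dm' created
  elsewhere: enable shared-point balancing before distributing the mesh.

    PetscBool isBalanced;
    DM        dmDist = NULL;

    PetscCall(DMPlexSetPartitionBalance(dm, PETSC_TRUE));
    PetscCall(DMPlexGetPartitionBalance(dm, &isBalanced)); // isBalanced == PETSC_TRUE
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));     // the ownership vote now uses random shifts
    if (dmDist) {
      PetscCall(DMDestroy(&dm));
      dm = dmDist;
    }
*/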
1438 
1439 typedef struct {
1440   PetscInt vote, rank, index;
1441 } Petsc3Int;
1442 
1443 /* MaxLoc, but carry a third piece of information around */
1444 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1445 {
1446   Petsc3Int *a = (Petsc3Int *)inout_;
1447   Petsc3Int *b = (Petsc3Int *)in_;
1448   PetscInt   i, len = *len_;
1449   for (i = 0; i < len; i++) {
1450     if (a[i].vote < b[i].vote) {
1451       a[i].vote  = b[i].vote;
1452       a[i].rank  = b[i].rank;
1453       a[i].index = b[i].index;
1454     } else if (a[i].vote <= b[i].vote) { /* votes are equal */
1455       if (a[i].rank >= b[i].rank) {      /* tie-break: keep the smaller rank and its index */
1456         a[i].rank  = b[i].rank;
1457         a[i].index = b[i].index;
1458       }
1459     }
1460   }
1461 }
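
/*
  A sketch (not part of this file) of the registration pattern used below in
  DMPlexCreatePointSF(): a contiguous 3-integer MPI datatype matching Petsc3Int
  is committed, and the reduction op is created as commutative.

    MPI_Op       op;
    MPI_Datatype dtype;

    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &dtype));
    PetscCallMPI(MPI_Type_commit(&dtype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op)); // 1 => commutative
    // ... use op/dtype with PetscSFReduceBegin()/PetscSFReduceEnd() ...
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&dtype));
*/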
1462 
1463 /*@C
1464   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1465 
1466   Input Parameters:
1467 + dm          - The source DMPlex object
1468 . migrationSF - The star forest that describes the parallel point remapping
1469 - ownership   - Flag causing a vote to determine point ownership
1470 
1471   Output Parameter:
1472 . pointSF     - The star forest describing the point overlap in the remapped DM
1473 
1474   Notes:
1475   The output pointSF is guaranteed to have its array of local indices (leaves) sorted.
1476 
1477   Level: developer
1478 
1479 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1480 @*/
1481 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1482 {
1483   PetscMPIInt        rank, size;
1484   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1485   PetscInt          *pointLocal;
1486   const PetscInt    *leaves;
1487   const PetscSFNode *roots;
1488   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1489   Vec                shifts;
1490   const PetscInt     numShifts  = 13759;
1491   const PetscScalar *shift      = NULL;
1492   const PetscBool    shiftDebug = PETSC_FALSE;
1493   PetscBool          balance;
1494 
1495   PetscFunctionBegin;
1496   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1497   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1498   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1499   PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1500 
1501   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1502   PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1503   PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1504   if (ownership) {
1505     MPI_Op       op;
1506     MPI_Datatype datatype;
1507     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1508     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the maximum is spread evenly among ranks. */
1509     if (balance) {
1510       PetscRandom r;
1511 
1512       PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1513       PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1514       PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1515       PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1516       PetscCall(VecSetType(shifts, VECSTANDARD));
1517       PetscCall(VecSetRandom(shifts, r));
1518       PetscCall(PetscRandomDestroy(&r));
1519       PetscCall(VecGetArrayRead(shifts, &shift));
1520     }
1521 
1522     PetscCall(PetscMalloc1(nroots, &rootVote));
1523     PetscCall(PetscMalloc1(nleaves, &leafVote));
1524     /* Point ownership vote: the process with the highest (possibly shifted) rank owns shared points */
1525     for (p = 0; p < nleaves; ++p) {
1526       if (shiftDebug) {
1527         PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1528                                           (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1529       }
1530       /* Either put in a bid or we know we own it */
1531       leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1532       leafVote[p].rank  = rank;
1533       leafVote[p].index = p;
1534     }
1535     for (p = 0; p < nroots; p++) {
1536       /* Roots must not participate in the reduction; flag them so that MaxLocCarry() ignores their entries */
1537       rootVote[p].vote  = -3;
1538       rootVote[p].rank  = -3;
1539       rootVote[p].index = -3;
1540     }
1541     PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1542     PetscCallMPI(MPI_Type_commit(&datatype));
1543     PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1544     PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1545     PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1546     PetscCallMPI(MPI_Op_free(&op));
1547     PetscCallMPI(MPI_Type_free(&datatype));
1548     for (p = 0; p < nroots; p++) {
1549       rootNodes[p].rank  = rootVote[p].rank;
1550       rootNodes[p].index = rootVote[p].index;
1551     }
1552     PetscCall(PetscFree(leafVote));
1553     PetscCall(PetscFree(rootVote));
1554   } else {
1555     for (p = 0; p < nroots; p++) {
1556       rootNodes[p].index = -1;
1557       rootNodes[p].rank  = rank;
1558     }
1559     for (p = 0; p < nleaves; p++) {
1560       /* Write new local id into old location */
1561       if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1562     }
1563   }
1564   PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1565   PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1566 
1567   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1568     if (leafNodes[p].rank != rank) npointLeaves++;
1569   }
1570   PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1571   PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1572   for (idx = 0, p = 0; p < nleaves; p++) {
1573     if (leafNodes[p].rank != rank) {
1574       /* Note that pointLocal is automatically sorted as it is a sublist of 0, ..., nleaves-1 */
1575       pointLocal[idx]  = p;
1576       pointRemote[idx] = leafNodes[p];
1577       idx++;
1578     }
1579   }
1580   if (shift) {
1581     PetscCall(VecRestoreArrayRead(shifts, &shift));
1582     PetscCall(VecDestroy(&shifts));
1583   }
1584   if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1585   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1586   PetscCall(PetscSFSetFromOptions(*pointSF));
1587   PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1588   PetscCall(PetscFree2(rootNodes, leafNodes));
1589   PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1590   if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
1591   PetscFunctionReturn(PETSC_SUCCESS);
1592 }
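
/*
  A sketch (not part of this file) of the internal calling pattern, assuming a
  migrated DM 'dmParallel' and the migration SF 'migrationSF' that produced it:

    PetscSF pointSF;

    PetscCall(DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF)); // PETSC_TRUE: vote on ownership
    PetscCall(DMSetPointSF(dmParallel, pointSF));
    PetscCall(PetscSFDestroy(&pointSF));
*/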
1593 
1594 /*@C
1595   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1596 
1597   Collective on dm
1598 
1599   Input Parameters:
1600 + dm       - The source DMPlex object
1601 - sf       - The star forest communication context describing the migration pattern
1602 
1603   Output Parameter:
1604 . targetDM - The target DMPlex object
1605 
1606   Level: intermediate
1607 
1608 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1609 @*/
1610 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1611 {
1612   MPI_Comm               comm;
1613   PetscInt               dim, cdim, nroots;
1614   PetscSF                sfPoint;
1615   ISLocalToGlobalMapping ltogMigration;
1616   ISLocalToGlobalMapping ltogOriginal = NULL;
1617 
1618   PetscFunctionBegin;
1619   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1620   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1621   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1622   PetscCall(DMGetDimension(dm, &dim));
1623   PetscCall(DMSetDimension(targetDM, dim));
1624   PetscCall(DMGetCoordinateDim(dm, &cdim));
1625   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1626 
1627   /* Check for a one-to-all distribution pattern */
1628   PetscCall(DMGetPointSF(dm, &sfPoint));
1629   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1630   if (nroots >= 0) {
1631     IS        isOriginal;
1632     PetscInt  n, size, nleaves;
1633     PetscInt *numbering_orig, *numbering_new;
1634 
1635     /* Get the original point numbering */
1636     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1637     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1638     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1639     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1640     /* Convert to positive global numbers */
1641     for (n = 0; n < size; n++) {
1642       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1643     }
1644     /* Derive the new local-to-global mapping from the old one */
1645     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1646     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1647     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1648     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1649     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1650     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1651     PetscCall(ISDestroy(&isOriginal));
1652   } else {
1653     /* One-to-all distribution pattern: We can derive LToG from SF */
1654     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1655   }
1656   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1657   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1658   /* Migrate DM data to target DM */
1659   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1660   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1661   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1662   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1663   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1664   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1665   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1666   PetscFunctionReturn(PETSC_SUCCESS);
1667 }
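
/*
  A sketch (not part of this file), assuming a DMPlex 'dm' and a migration SF
  'migrationSF' built elsewhere: create an empty target DM and migrate into it.

    DM dmTarget;

    PetscCall(DMPlexCreate(PetscObjectComm((PetscObject)dm), &dmTarget));
    PetscCall(DMPlexMigrate(dm, migrationSF, dmTarget));
*/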
1668 
1669 /*@C
1670   DMPlexDistribute - Distributes the mesh and any associated sections.
1671 
1672   Collective on dm
1673 
1674   Input Parameters:
1675 + dm  - The original DMPlex object
1676 - overlap - The overlap of partitions, 0 is the default
1677 
1678   Output Parameters:
1679 + sf - The PetscSF used for point distribution, or NULL if not needed
1680 - dmParallel - The distributed DMPlex object
1681 
1682   Notes:
1683   If the mesh was not distributed, the output dmParallel will be NULL.
1684   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1685   representation on the mesh.
1686 
1687   Level: intermediate
1688 
1689 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1690 @*/
1691 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1692 {
1693   MPI_Comm         comm;
1694   PetscPartitioner partitioner;
1695   IS               cellPart;
1696   PetscSection     cellPartSection;
1697   DM               dmCoord;
1698   DMLabel          lblPartition, lblMigration;
1699   PetscSF          sfMigration, sfStratified, sfPoint;
1700   PetscBool        flg, balance;
1701   PetscMPIInt      rank, size;
1702 
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1705   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1706   if (sf) PetscValidPointer(sf, 3);
1707   PetscValidPointer(dmParallel, 4);
1708 
1709   if (sf) *sf = NULL;
1710   *dmParallel = NULL;
1711   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1712   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1713   PetscCallMPI(MPI_Comm_size(comm, &size));
1714   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1715 
1716   PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
1717   /* Create cell partition */
1718   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1719   PetscCall(PetscSectionCreate(comm, &cellPartSection));
1720   PetscCall(DMPlexGetPartitioner(dm, &partitioner));
1721   PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
1722   PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
1723   {
1724     /* Convert partition to DMLabel */
1725     IS              is;
1726     PetscHSetI      ht;
1727     const PetscInt *points;
1728     PetscInt       *iranks;
1729     PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;
1730 
1731     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
1732     /* Preallocate strata */
1733     PetscCall(PetscHSetICreate(&ht));
1734     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1735     for (proc = pStart; proc < pEnd; proc++) {
1736       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1737       if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
1738     }
1739     PetscCall(PetscHSetIGetSize(ht, &nranks));
1740     PetscCall(PetscMalloc1(nranks, &iranks));
1741     PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
1742     PetscCall(PetscHSetIDestroy(&ht));
1743     PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
1744     PetscCall(PetscFree(iranks));
1745     /* Inline DMPlexPartitionLabelClosure() */
1746     PetscCall(ISGetIndices(cellPart, &points));
1747     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1748     for (proc = pStart; proc < pEnd; proc++) {
1749       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1750       if (!npoints) continue;
1751       PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
1752       PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
1753       PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
1754       PetscCall(ISDestroy(&is));
1755     }
1756     PetscCall(ISRestoreIndices(cellPart, &points));
1757   }
1758   PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));
1759 
1760   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
1761   PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
1762   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
1763   PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
1764   PetscCall(PetscSFDestroy(&sfMigration));
1765   sfMigration = sfStratified;
1766   PetscCall(PetscSFSetUp(sfMigration));
1767   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1768   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
1769   if (flg) {
1770     PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
1771     PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
1772   }
1773 
1774   /* Create non-overlapping parallel DM and migrate internal data */
1775   PetscCall(DMPlexCreate(comm, dmParallel));
1776   PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
1777   PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));
1778 
1779   /* Build the point SF without overlap */
1780   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1781   PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
1782   PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
1783   PetscCall(DMSetPointSF(*dmParallel, sfPoint));
1784   PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
1785   PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
1786   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1787   if (flg) PetscCall(PetscSFView(sfPoint, NULL));
1788 
1789   if (overlap > 0) {
1790     DM                 dmOverlap;
1791     PetscInt           nroots, nleaves, noldleaves, l;
1792     const PetscInt    *oldLeaves;
1793     PetscSFNode       *newRemote, *permRemote;
1794     const PetscSFNode *oldRemote;
1795     PetscSF            sfOverlap, sfOverlapPoint;
1796 
1797     /* Add the partition overlap to the distributed DM */
1798     PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
1799     PetscCall(DMDestroy(dmParallel));
1800     *dmParallel = dmOverlap;
1801     if (flg) {
1802       PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
1803       PetscCall(PetscSFView(sfOverlap, NULL));
1804     }
1805 
1806     /* Re-map the migration SF to establish the full migration pattern */
1807     PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
1808     PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
1809     PetscCall(PetscMalloc1(nleaves, &newRemote));
1810     /* oldRemote: original root point mapping to original leaf point
1811        newRemote: original leaf point mapping to overlapped leaf point */
1812     if (oldLeaves) {
1813       /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1814       PetscCall(PetscMalloc1(noldleaves, &permRemote));
1815       for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1816       oldRemote = permRemote;
1817     }
1818     PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1819     PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1820     if (oldLeaves) PetscCall(PetscFree(oldRemote));
1821     PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
1822     PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
1823     PetscCall(PetscSFDestroy(&sfOverlap));
1824     PetscCall(PetscSFDestroy(&sfMigration));
1825     sfMigration = sfOverlapPoint;
1826   }
1827   /* Cleanup Partition */
1828   PetscCall(DMLabelDestroy(&lblPartition));
1829   PetscCall(DMLabelDestroy(&lblMigration));
1830   PetscCall(PetscSectionDestroy(&cellPartSection));
1831   PetscCall(ISDestroy(&cellPart));
1832   /* Copy discretization */
1833   PetscCall(DMCopyDisc(dm, *dmParallel));
1834   /* Create sfNatural */
1835   if (dm->useNatural) {
1836     PetscSection section;
1837 
1838     PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
1839     PetscCall(DMGetLocalSection(dm, &section));
1840 
1841     /* The first DM with useNatural = PETSC_TRUE is considered the natural DM */
1842     /* sfMigration and sfNatural are respectively the point and dof SFs mapping to this natural DM */
1843     /* Compose with a previous sfNatural if present */
1844     if (dm->sfNatural) {
1845       PetscSF natSF;
1846 
1847       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
1848       PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
1849       PetscCall(PetscSFDestroy(&natSF));
1850     } else {
1851       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
1852     }
1853     /* Compose with a previous sfMigration if present */
1854     if (dm->sfMigration) {
1855       PetscSF naturalPointSF;
1856 
1857       PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
1858       PetscCall(PetscSFDestroy(&sfMigration));
1859       sfMigration = naturalPointSF;
1860     }
1861   }
1862   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
1863   /* Cleanup */
1864   if (sf) {
1865     *sf = sfMigration;
1866   } else PetscCall(PetscSFDestroy(&sfMigration));
1867   PetscCall(PetscSFDestroy(&sfPoint));
1868   PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
1869   PetscFunctionReturn(PETSC_SUCCESS);
1870 }
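
/*
  The usual calling sequence (a sketch, not part of this file): distribute and,
  if a parallel mesh was produced, replace the original DM with it.

    DM dmDist = NULL;

    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
    if (dmDist) {
      PetscCall(DMDestroy(&dm));
      dm = dmDist;
    }
*/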
1871 
1872 /*@C
1873   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1874 
1875   Collective on dm
1876 
1877   Input Parameters:
1878 + dm  - The non-overlapping distributed DMPlex object
1879 - overlap - The overlap of partitions (the same on all ranks)
1880 
1881   Output Parameters:
1882 + sf - The PetscSF used for point distribution
1883 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1884 
1885   Notes:
1886   If the mesh was not distributed, the return value is NULL.
1887 
1888   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1889   representation on the mesh.
1890 
1891   Options Database Keys:
1892 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1893 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1894 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1895 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1896 
1897   Level: advanced
1898 
1899 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
1900 @*/
1901 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1902 {
1903   DM_Plex     *mesh = (DM_Plex *)dm->data;
1904   MPI_Comm     comm;
1905   PetscMPIInt  size, rank;
1906   PetscSection rootSection, leafSection;
1907   IS           rootrank, leafrank;
1908   DM           dmCoord;
1909   DMLabel      lblOverlap;
1910   PetscSF      sfOverlap, sfStratified, sfPoint;
1911 
1912   PetscFunctionBegin;
1913   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1914   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1915   if (sf) PetscValidPointer(sf, 3);
1916   PetscValidPointer(dmOverlap, 4);
1917 
1918   if (sf) *sf = NULL;
1919   *dmOverlap = NULL;
1920   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1921   PetscCallMPI(MPI_Comm_size(comm, &size));
1922   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1923   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1924   {
1925     // We need to get options for the already distributed mesh, so it must be done here
1926     PetscInt    overlap;
1927     const char *prefix;
1928     char        oldPrefix[PETSC_MAX_PATH_LEN];
1929 
1930     oldPrefix[0] = '\0';
1931     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1932     PetscCall(PetscStrncpy(oldPrefix, prefix, sizeof(oldPrefix)));
1933     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
1934     PetscCall(DMPlexGetOverlap(dm, &overlap));
1935     PetscObjectOptionsBegin((PetscObject)dm);
1936     PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
1937     PetscOptionsEnd();
1938     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
1939   }
1940   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1941   /* Compute point overlap with neighbouring processes on the distributed DM */
1942   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1943   PetscCall(PetscSectionCreate(comm, &rootSection));
1944   PetscCall(PetscSectionCreate(comm, &leafSection));
1945   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
1946   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1947   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1948   /* Convert overlap label to stratified migration SF */
1949   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
1950   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
1951   PetscCall(PetscSFDestroy(&sfOverlap));
1952   sfOverlap = sfStratified;
1953   PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
1954   PetscCall(PetscSFSetFromOptions(sfOverlap));
1955 
1956   PetscCall(PetscSectionDestroy(&rootSection));
1957   PetscCall(PetscSectionDestroy(&leafSection));
1958   PetscCall(ISDestroy(&rootrank));
1959   PetscCall(ISDestroy(&leafrank));
1960   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1961 
1962   /* Build the overlapping DM */
1963   PetscCall(DMPlexCreate(comm, dmOverlap));
1964   PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
1965   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
1966   /* Store the overlap in the new DM */
1967   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
1968   /* Build the new point SF */
1969   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
1970   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
1971   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
1972   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1973   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
1974   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1975   PetscCall(PetscSFDestroy(&sfPoint));
1976   /* Cleanup overlap partition */
1977   PetscCall(DMLabelDestroy(&lblOverlap));
1978   if (sf) *sf = sfOverlap;
1979   else PetscCall(PetscSFDestroy(&sfOverlap));
1980   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1981   PetscFunctionReturn(PETSC_SUCCESS);
1982 }
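
/*
  A sketch (not part of this file): add one level of cell overlap to an
  already-distributed mesh and swap it in.

    DM      dmOverlap = NULL;
    PetscSF sfOverlap = NULL;

    PetscCall(DMPlexDistributeOverlap(dm, 1, &sfOverlap, &dmOverlap));
    if (dmOverlap) {
      PetscCall(DMDestroy(&dm));
      dm = dmOverlap;
    }
    PetscCall(PetscSFDestroy(&sfOverlap));
*/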
1983 
1984 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1985 {
1986   DM_Plex *mesh = (DM_Plex *)dm->data;
1987 
1988   PetscFunctionBegin;
1989   *overlap = mesh->overlap;
1990   PetscFunctionReturn(PETSC_SUCCESS);
1991 }
1992 
1993 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
1994 {
1995   DM_Plex *mesh    = NULL;
1996   DM_Plex *meshSrc = NULL;
1997 
1998   PetscFunctionBegin;
1999   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2000   mesh          = (DM_Plex *)dm->data;
2001   mesh->overlap = overlap;
2002   if (dmSrc) {
2003     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2004     meshSrc = (DM_Plex *)dmSrc->data;
2005     mesh->overlap += meshSrc->overlap;
2006   }
2007   PetscFunctionReturn(PETSC_SUCCESS);
2008 }
2009 
2010 /*@
2011   DMPlexGetOverlap - Get the width of the cell overlap
2012 
2013   Not collective
2014 
2015   Input Parameter:
2016 . dm   - The DM
2017 
2018   Output Parameter:
2019 . overlap - the width of the cell overlap
2020 
2021   Level: intermediate
2022 
2023 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2024 @*/
2025 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2026 {
2027   PetscFunctionBegin;
2028   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2029   PetscValidIntPointer(overlap, 2);
2030   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2031   PetscFunctionReturn(PETSC_SUCCESS);
2032 }
2033 
2034 /*@
2035   DMPlexSetOverlap - Set the width of the cell overlap
2036 
2037   Logically collective
2038 
2039   Input Parameters:
2040 + dm      - The DM
2041 . dmSrc   - The DM that produced this one, or NULL
2042 - overlap - the width of the cell overlap
2043 
2044   Note:
2045   The overlap from dmSrc is added to dm
2046 
2047   Level: intermediate
2048 
2049 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2050 @*/
2051 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2052 {
2053   PetscFunctionBegin;
2054   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2055   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2056   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2057   PetscFunctionReturn(PETSC_SUCCESS);
2058 }
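
/*
  A sketch (not part of this file), with hypothetical DMs 'dmNew' and 'dmOld':
  record that dmNew carries one more level of overlap than its source, so that
  DMPlexGetOverlap(dmNew, ...) reports the accumulated width.

    PetscCall(DMPlexSetOverlap(dmNew, dmOld, 1));
*/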
2059 
2060 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2061 {
2062   DM_Plex *mesh = (DM_Plex *)dm->data;
2063 
2064   PetscFunctionBegin;
2065   mesh->distDefault = dist;
2066   PetscFunctionReturn(PETSC_SUCCESS);
2067 }
2068 
2069 /*@
2070   DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default
2071 
2072   Logically collective
2073 
2074   Input Parameters:
2075 + dm   - The DM
2076 - dist - Flag for distribution
2077 
2078   Level: intermediate
2079 
2080 .seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2081 @*/
2082 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2083 {
2084   PetscFunctionBegin;
2085   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2086   PetscValidLogicalCollectiveBool(dm, dist, 2);
2087   PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2088   PetscFunctionReturn(PETSC_SUCCESS);
2089 }
2090 
2091 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2092 {
2093   DM_Plex *mesh = (DM_Plex *)dm->data;
2094 
2095   PetscFunctionBegin;
2096   *dist = mesh->distDefault;
2097   PetscFunctionReturn(PETSC_SUCCESS);
2098 }
2099 
2100 /*@
2101   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2102 
2103   Not collective
2104 
2105   Input Parameter:
2106 . dm   - The DM
2107 
2108   Output Parameter:
2109 . dist - Flag for distribution
2110 
2111   Level: intermediate
2112 
2113 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2114 @*/
2115 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2116 {
2117   PetscFunctionBegin;
2118   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2119   PetscValidBoolPointer(dist, 2);
2120   PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2121   PetscFunctionReturn(PETSC_SUCCESS);
2122 }
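
/*
  A sketch (not part of this file): turn off distribution-by-default so that a
  subsequent DMSetFromOptions() leaves the mesh where it is, then query the flag.

    PetscBool dist;

    PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
    PetscCall(DMPlexDistributeGetDefault(dm, &dist)); // dist == PETSC_FALSE
*/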
2123 
2124 /*@C
2125   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2126   root process of the original's communicator.
2127 
2128   Collective on dm
2129 
2130   Input Parameter:
2131 . dm - the original DMPlex object
2132 
2133   Output Parameters:
2134 + sf - the PetscSF used for point distribution (optional)
2135 - gatherMesh - the gathered DM object, or NULL
2136 
2137   Level: intermediate
2138 
2139 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2140 @*/
2141 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2142 {
2143   MPI_Comm         comm;
2144   PetscMPIInt      size;
2145   PetscPartitioner oldPart, gatherPart;
2146 
2147   PetscFunctionBegin;
2148   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2149   PetscValidPointer(gatherMesh, 3);
2150   *gatherMesh = NULL;
2151   if (sf) *sf = NULL;
2152   comm = PetscObjectComm((PetscObject)dm);
2153   PetscCallMPI(MPI_Comm_size(comm, &size));
2154   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2155   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2156   PetscCall(PetscObjectReference((PetscObject)oldPart));
2157   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2158   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2159   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2160   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2161 
2162   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2163   PetscCall(PetscPartitionerDestroy(&gatherPart));
2164   PetscCall(PetscPartitionerDestroy(&oldPart));
2165   PetscFunctionReturn(PETSC_SUCCESS);
2166 }
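
/*
  A sketch (not part of this file): gather a distributed mesh onto the root rank,
  e.g. to view it serially, then discard the gathered copy; the option name
  "-gathered_dm_view" is an illustrative assumption.

    DM gatherMesh = NULL;

    PetscCall(DMPlexGetGatherDM(dm, NULL, &gatherMesh));
    if (gatherMesh) {
      PetscCall(DMViewFromOptions(gatherMesh, NULL, "-gathered_dm_view"));
      PetscCall(DMDestroy(&gatherMesh));
    }
*/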
2167 
2168 /*@C
2169   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
2170 
2171   Collective on dm
2172 
2173   Input Parameter:
2174 . dm - the original DMPlex object
2175 
2176   Output Parameters:
2177 + sf - the PetscSF used for point distribution (optional)
2178 - redundantMesh - the redundant DM object, or NULL
2179 
2180   Level: intermediate
2181 
2182 .seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2183 @*/
2184 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
2185 {
2186   MPI_Comm     comm;
2187   PetscMPIInt  size, rank;
2188   PetscInt     pStart, pEnd, p;
2189   PetscInt     numPoints = -1;
2190   PetscSF      migrationSF, sfPoint, gatherSF;
2191   DM           gatherDM, dmCoord;
2192   PetscSFNode *points;
2193 
2194   PetscFunctionBegin;
2195   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2196   PetscValidPointer(redundantMesh, 3);
2197   *redundantMesh = NULL;
2198   comm           = PetscObjectComm((PetscObject)dm);
2199   PetscCallMPI(MPI_Comm_size(comm, &size));
2200   if (size == 1) {
2201     PetscCall(PetscObjectReference((PetscObject)dm));
2202     *redundantMesh = dm;
2203     if (sf) *sf = NULL;
2204     PetscFunctionReturn(PETSC_SUCCESS);
2205   }
2206   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2207   if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
2208   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2209   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2210   numPoints = pEnd - pStart;
2211   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2212   PetscCall(PetscMalloc1(numPoints, &points));
2213   PetscCall(PetscSFCreate(comm, &migrationSF));
2214   for (p = 0; p < numPoints; p++) {
2215     points[p].index = p;
2216     points[p].rank  = 0;
2217   }
2218   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2219   PetscCall(DMPlexCreate(comm, redundantMesh));
2220   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2221   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2222   /* This is to express that all points are in the overlap */
2223   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
2224   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2225   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2226   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2227   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2228   PetscCall(PetscSFDestroy(&sfPoint));
2229   if (sf) {
2230     PetscSF tsf;
2231 
2232     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2233     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2234     PetscCall(PetscSFDestroy(&tsf));
2235   }
2236   PetscCall(PetscSFDestroy(&migrationSF));
2237   PetscCall(PetscSFDestroy(&gatherSF));
2238   PetscCall(DMDestroy(&gatherDM));
2239   PetscCall(DMCopyDisc(dm, *redundantMesh));
2240   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2241   PetscFunctionReturn(PETSC_SUCCESS);
2242 }
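
/*
  A sketch (not part of this file): replicate the whole mesh on every rank; the
  returned SF maps original points to their redundant copies.

    DM      redundantMesh = NULL;
    PetscSF sf            = NULL;

    PetscCall(DMPlexGetRedundantDM(dm, &sf, &redundantMesh));
    if (redundantMesh) {
      // ... use the rank-local copy ...
      PetscCall(DMDestroy(&redundantMesh));
    }
    PetscCall(PetscSFDestroy(&sf));
*/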
2243 
2244 /*@
2245   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. whether more than one rank owns some points.
2246 
2247   Collective
2248 
2249   Input Parameter:
2250 . dm      - The DM object
2251 
2252   Output Parameter:
2253 . distributed - Flag whether the DM is distributed
2254 
2255   Level: intermediate
2256 
2257   Notes:
2258   This currently finds out whether at least two ranks have any DAG points.
2259   This involves MPI_Allreduce() with one integer.
2260   The result is currently not stashed so every call to this routine involves this global communication.
2261 
2262 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2263 @*/
2264 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2265 {
2266   PetscInt    pStart, pEnd, count;
2267   MPI_Comm    comm;
2268   PetscMPIInt size;
2269 
2270   PetscFunctionBegin;
2271   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2272   PetscValidBoolPointer(distributed, 2);
2273   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2274   PetscCallMPI(MPI_Comm_size(comm, &size));
2275   if (size == 1) {
2276     *distributed = PETSC_FALSE;
2277     PetscFunctionReturn(PETSC_SUCCESS);
2278   }
2279   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2280   count = (pEnd - pStart) > 0 ? 1 : 0;
2281   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2282   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2283   PetscFunctionReturn(PETSC_SUCCESS);
2284 }
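
/*
  A sketch (not part of this file): distribute only when the mesh is still serial.

    PetscBool distributed;

    PetscCall(DMPlexIsDistributed(dm, &distributed)); // collective: one MPI_Allreduce
    if (!distributed) {
      DM dmDist = NULL;

      PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
      if (dmDist) {
        PetscCall(DMDestroy(&dm));
        dm = dmDist;
      }
    }
*/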
2285 
2286 /*@C
2287   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2288 
2289   Input Parameters:
2290 + dm   - The DM
2291 - name - The name of the specific parallel distribution
2292 
2293   Note:
2294   If the distribution name is set when saving, DMPlexTopologyView() saves the plex's
2295   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2296   this name. Conversely, if the distribution name is set when loading, DMPlexTopologyLoad()
2297   loads the parallel distribution stored in the file under this name.
2298 
2299   Level: developer
2300 
2301 .seealso: `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2302 @*/
2303 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2304 {
2305   DM_Plex *mesh = (DM_Plex *)dm->data;
2306 
2307   PetscFunctionBegin;
2308   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2309   if (name) PetscValidCharPointer(name, 2);
2310   PetscCall(PetscFree(mesh->distributionName));
2311   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2312   PetscFunctionReturn(PETSC_SUCCESS);
2313 }
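
/*
  A sketch (not part of this file): name the distribution before saving so the
  same partition can be reloaded later under that name; the viewer 'viewer' and
  the name "cube_16p" are illustrative assumptions.

    PetscCall(DMPlexDistributionSetName(dm, "cube_16p"));
    PetscCall(DMPlexTopologyView(dm, viewer));
*/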
2314 
2315 /*@C
2316   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2317 
2318   Input Parameter:
2319 . dm - The DM
2320 
2321   Output Parameter:
2322 . name - The name of the specific parallel distribution
2323 
2324   Note:
2325   If the distribution name is set when saving, DMPlexTopologyView() saves the plex's
2326   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2327   this name. Conversely, if the distribution name is set when loading, DMPlexTopologyLoad()
2328   loads the parallel distribution stored in the file under this name.
2329 
2330   Level: developer
2331 
2332 .seealso: `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2333 @*/
2334 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2335 {
2336   DM_Plex *mesh = (DM_Plex *)dm->data;
2337 
2338   PetscFunctionBegin;
2339   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2340   PetscValidPointer(name, 2);
2341   *name = mesh->distributionName;
2342   PetscFunctionReturn(PETSC_SUCCESS);
2343 }
2344