xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 80eb40d0eabdd4c112fb6f4b9c1977024e4db02d)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(PETSC_SUCCESS);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(PETSC_SUCCESS);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(PETSC_SUCCESS);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(PETSC_SUCCESS);
177 }
178 
/* Core adjacency driver: dispatches to the user callback or one of the cone/support/closure
   variants, optionally allocating the output array and replacing anchored (constrained)
   points by their anchors.  On entry *adjSize may be negative (PETSC_DETERMINE) to request
   the cached worst-case size; *adj may be NULL to request allocation (caller must free). */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz       = 0; /* NOTE(review): worst-case size cached from the most recent allocating call; shared across all DMs and not thread-safe -- confirm intended */
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* Pull in the anchor section/IS; aSec remains NULL when the mesh has no constraints */
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    /* Allocate a worst-case adjacency buffer: geometric-series bounds on closure/star sizes,
       scaled by the maximum anchors per point and capped by the chart size */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
    asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
    asiz *= maxAnchors;
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz; /* PETSC_DETERMINE: fall back to the cached worst-case size */
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    /* A user-supplied callback overrides every other adjacency mode */
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the result by its anchor points, in place, deduplicating */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i]; /* NOTE(review): shadows the function argument p -- confirm intentional */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left; do not advance i, since a new point now occupies slot i */
        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        for (s = 0; s < aDof; ++s) {
          /* Comma-expression idiom: append the anchor when the scan falls off the end without a match */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
259 
260 /*@
261   DMPlexGetAdjacency - Return all points adjacent to the given point
262 
263   Input Parameters:
264 + dm - The DM object
265 - p  - The point
266 
267   Input/Output Parameters:
268 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
269             on output the number of adjacent points
270 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
271         on output contains the adjacent points
272 
273   Level: advanced
274 
275   Notes:
276     The user must PetscFree the adj array if it was not passed in.
277 
278 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
279 @*/
280 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
281 {
282   PetscBool useCone, useClosure, useAnchors;
283 
284   PetscFunctionBeginHot;
285   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
286   PetscValidIntPointer(adjSize, 3);
287   PetscValidPointer(adj, 4);
288   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
289   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
290   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
291   PetscFunctionReturn(PETSC_SUCCESS);
292 }
293 
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on dm

  Input Parameters:
+ dm      - The DM
. sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of ranks attached to each root point
. rootRanks - IS listing the ranks attached to each root point
. leafRankSection - Section giving the number of ranks attached to each leaf point
- leafRanks - IS listing the ranks attached to each leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors; /* bitmap over ranks: bit proc is set iff this process shares points with proc */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) PetscValidPointer(processRanks, 7);
  if (sfProcess) PetscValidPointer(sfProcess, 8);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
  PetscCall(PetscBTCreate(size, &neighbors));
  PetscCall(PetscBTMemzero(size, neighbors));
  /* Compute root-to-leaf process connectivity */
  PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(rootRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(rootRanks, &nranks));
  /* Compute leaf-to-neighbor process connectivity */
  PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(leafRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(leafRanks, &nranks));
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
  /* Calculate edges */
  PetscCall(PetscBTClear(neighbors, rank)); /* a process is not its own neighbor */
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
  }
  PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
  PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
  PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc; /* neighbor rank */
      localPointsNew[n]        = proc; /* leaf indexed by the neighbor rank */
      remotePointsNew[n].index = rank; /* root on the neighbor indexed by our rank */
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  PetscCall(PetscBTDestroy(&neighbors));
  /* ranksNew ownership transfers to the IS when requested; otherwise free it here */
  if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
  else PetscCall(PetscFree(ranksNew));
  if (sfProcess) {
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
    PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
    PetscCall(PetscSFSetFromOptions(*sfProcess));
    /* localPointsNew/remotePointsNew ownership transfers to the SF */
    PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
388 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root: every leaf sends its own rank, roots receive one entry per attached leaf */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* remoterank ownership transfers to the IS */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
442 
443 /*@C
444   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
445 
446   Collective on dm
447 
448   Input Parameters:
449 + dm          - The DM
450 . levels      - Number of overlap levels
451 . rootSection - The number of leaves for a given root point
452 . rootrank    - The rank of each edge into the root point
453 . leafSection - The number of processes sharing a given leaf point
454 - leafrank    - The rank of each process sharing a leaf point
455 
456   Output Parameter:
457 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
458 
459   Level: developer
460 
461 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
462 @*/
463 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
464 {
465   MPI_Comm           comm;
466   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
467   PetscSF            sfPoint;
468   const PetscSFNode *remote;
469   const PetscInt    *local;
470   const PetscInt    *nrank, *rrank;
471   PetscInt          *adj = NULL;
472   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
473   PetscMPIInt        rank, size;
474   PetscBool          flg;
475 
476   PetscFunctionBegin;
477   *ovLabel = NULL;
478   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
479   PetscCallMPI(MPI_Comm_size(comm, &size));
480   PetscCallMPI(MPI_Comm_rank(comm, &rank));
481   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
482   PetscCall(DMGetPointSF(dm, &sfPoint));
483   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
484   if (!levels) {
485     /* Add owned points */
486     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
487     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
488     PetscFunctionReturn(PETSC_SUCCESS);
489   }
490   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
491   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
492   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
493   /* Handle leaves: shared with the root point */
494   for (l = 0; l < nleaves; ++l) {
495     PetscInt adjSize = PETSC_DETERMINE, a;
496 
497     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
498     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
499   }
500   PetscCall(ISGetIndices(rootrank, &rrank));
501   PetscCall(ISGetIndices(leafrank, &nrank));
502   /* Handle roots */
503   for (p = pStart; p < pEnd; ++p) {
504     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
505 
506     if ((p >= sStart) && (p < sEnd)) {
507       /* Some leaves share a root with other leaves on different processes */
508       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
509       if (neighbors) {
510         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
511         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
512         for (n = 0; n < neighbors; ++n) {
513           const PetscInt remoteRank = nrank[noff + n];
514 
515           if (remoteRank == rank) continue;
516           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
517         }
518       }
519     }
520     /* Roots are shared with leaves */
521     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
522     if (!neighbors) continue;
523     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
524     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
525     for (n = 0; n < neighbors; ++n) {
526       const PetscInt remoteRank = rrank[noff + n];
527 
528       if (remoteRank == rank) continue;
529       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
530     }
531   }
532   PetscCall(PetscFree(adj));
533   PetscCall(ISRestoreIndices(rootrank, &rrank));
534   PetscCall(ISRestoreIndices(leafrank, &nrank));
535   /* Add additional overlap levels */
536   for (l = 1; l < levels; l++) {
537     /* Propagate point donations over SF to capture remote connections */
538     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
539     /* Add next level of point donations to the label */
540     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
541   }
542   /* We require the closure in the overlap */
543   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
544   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
545   if (flg) {
546     PetscViewer viewer;
547     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
548     PetscCall(DMLabelView(ovAdjByRank, viewer));
549   }
550   /* Invert sender to receiver label */
551   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
552   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
553   /* Add owned points, except for shared local points */
554   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
555   for (l = 0; l < nleaves; ++l) {
556     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
557     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
558   }
559   /* Clean up */
560   PetscCall(DMLabelDestroy(&ovAdjByRank));
561   PetscFunctionReturn(PETSC_SUCCESS);
562 }
563 
564 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
565 {
566   PetscInt neighbors, el;
567 
568   PetscFunctionBegin;
569   PetscCall(PetscSectionGetDof(section, p, &neighbors));
570   if (neighbors) {
571     PetscInt   *adj     = NULL;
572     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
573     PetscMPIInt rank;
574 
575     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
576     PetscCall(PetscSectionGetOffset(section, p, &noff));
577     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
578     for (n = 0; n < neighbors; ++n) {
579       const PetscInt remoteRank = ranks[noff + n];
580 
581       if (remoteRank == rank) continue;
582       for (a = 0; a < adjSize; ++a) {
583         PetscBool insert = PETSC_TRUE;
584 
585         for (el = 0; el < numExLabels; ++el) {
586           PetscInt exVal;
587           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
588           if (exVal == exValue[el]) {
589             insert = PETSC_FALSE;
590             break;
591           }
592         }
593         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
594       }
595     }
596     PetscCall(PetscFree(adj));
597   }
598   PetscFunctionReturn(PETSC_SUCCESS);
599 }
600 
601 /*@C
602   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
603 
604   Collective on dm
605 
606   Input Parameters:
607 + dm          - The DM
608 . numLabels   - The number of labels to draw candidate points from
609 . label       - An array of labels containing candidate points
610 . value       - An array of label values marking the candidate points
611 . numExLabels - The number of labels to use for exclusion
612 . exLabel     - An array of labels indicating points to be excluded, or NULL
613 . exValue     - An array of label values to be excluded, or NULL
614 . rootSection - The number of leaves for a given root point
615 . rootrank    - The rank of each edge into the root point
616 . leafSection - The number of processes sharing a given leaf point
617 - leafrank    - The rank of each process sharing a leaf point
618 
619   Output Parameter:
620 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
621 
622   Note:
623   The candidate points are only added to the overlap if they are adjacent to a shared point
624 
625   Level: developer
626 
627 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
628 @*/
629 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
630 {
631   MPI_Comm           comm;
632   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
633   PetscSF            sfPoint;
634   const PetscSFNode *remote;
635   const PetscInt    *local;
636   const PetscInt    *nrank, *rrank;
637   PetscInt          *adj = NULL;
638   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
639   PetscMPIInt        rank, size;
640   PetscBool          flg;
641 
642   PetscFunctionBegin;
643   *ovLabel = NULL;
644   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
645   PetscCallMPI(MPI_Comm_size(comm, &size));
646   PetscCallMPI(MPI_Comm_rank(comm, &rank));
647   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
648   PetscCall(DMGetPointSF(dm, &sfPoint));
649   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
650   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
651   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
652   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
653   PetscCall(ISGetIndices(rootrank, &rrank));
654   PetscCall(ISGetIndices(leafrank, &nrank));
655   for (l = 0; l < numLabels; ++l) {
656     IS              valIS;
657     const PetscInt *points;
658     PetscInt        n;
659 
660     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
661     if (!valIS) continue;
662     PetscCall(ISGetIndices(valIS, &points));
663     PetscCall(ISGetLocalSize(valIS, &n));
664     for (PetscInt i = 0; i < n; ++i) {
665       const PetscInt p = points[i];
666 
667       if ((p >= sStart) && (p < sEnd)) {
668         PetscInt loc, adjSize = PETSC_DETERMINE;
669 
670         /* Handle leaves: shared with the root point */
671         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
672         else loc = (p >= 0 && p < nleaves) ? p : -1;
673         if (loc >= 0) {
674           const PetscInt remoteRank = remote[loc].rank;
675 
676           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
677           for (PetscInt a = 0; a < adjSize; ++a) {
678             PetscBool insert = PETSC_TRUE;
679 
680             for (el = 0; el < numExLabels; ++el) {
681               PetscInt exVal;
682               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
683               if (exVal == exValue[el]) {
684                 insert = PETSC_FALSE;
685                 break;
686               }
687             }
688             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
689           }
690         }
691         /* Some leaves share a root with other leaves on different processes */
692         PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
693       }
694       /* Roots are shared with leaves */
695       PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
696     }
697     PetscCall(ISRestoreIndices(valIS, &points));
698     PetscCall(ISDestroy(&valIS));
699   }
700   PetscCall(PetscFree(adj));
701   PetscCall(ISRestoreIndices(rootrank, &rrank));
702   PetscCall(ISRestoreIndices(leafrank, &nrank));
703   /* We require the closure in the overlap */
704   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
705   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
706   if (flg) {
707     PetscViewer viewer;
708     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
709     PetscCall(DMLabelView(ovAdjByRank, viewer));
710   }
711   /* Invert sender to receiver label */
712   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
713   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
714   /* Add owned points, except for shared local points */
715   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
716   for (l = 0; l < nleaves; ++l) {
717     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
718     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
719   }
720   /* Clean up */
721   PetscCall(DMLabelDestroy(&ovAdjByRank));
722   PetscFunctionReturn(PETSC_SUCCESS);
723 }
724 
725 /*@C
726   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
727 
728   Collective on dm
729 
730   Input Parameters:
731 + dm          - The DM
732 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
733 
734   Output Parameter:
735 . migrationSF - An SF that maps original points in old locations to points in new locations
736 
737   Level: developer
738 
739 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
740 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  /* Record the depth (stratum) of every local point ... */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  /* ... and communicate it to the receivers of each overlap point */
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift.
     The arithmetic below appends received points in the order
       cells (depth dim), vertices (depth 0), then intermediate strata from
       high depth down to depth 1,
     matching the standard Plex stratum ordering. */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    /* depthIdx[d] tracks the next free slot for incoming points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: every local point
     maps to itself (possibly shifted to make room for incoming points), and the
     new leaves map to the overlap roots on other processes */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, overriding the
     self-mapping so shared points keep their existing owners */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, slotted after the local points of the
     same stratum (depthIdx already points past the local points) */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* Ownership of ilocal/iremote passes to the SF */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
834 
835 /*@
836   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
837 
838   Input Parameters:
839 + dm          - The DM
840 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
841 
842   Output Parameter:
843 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
844 
845   Note:
846   This lexicographically sorts by (depth, cellType)
847 
848   Level: developer
849 
850 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
851 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* A rank with no points reports ldepth < 0 and is exempt; any rank with
     points must agree with the global maximum depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets:
     pack (depth, cellType) per local point into an SFNode (index = depth,
     rank = cell type) ... */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  /* ... with (-1, -1) marking leaves that receive nothing */
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      /* No cell type available: bin by depth only */
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
       NOTE(review): depths[]/dims[] have 4 entries, so this assumes depth <= 3 */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i]; /* intentionally shadows the outer 'dim' */

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      /* Depth-only points of this stratum come after all its typed points */
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Cell types whose dimension lies outside [0, dim] are placed at the end */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;

    if ((PetscInt)ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  /* Roots are unchanged; only the leaf ordering (ilocal) is new */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
959 
960 /*@
961   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
962 
963   Collective on dm
964 
965   Input Parameters:
966 + dm - The DMPlex object
967 . pointSF - The PetscSF describing the communication pattern
968 . originalSection - The PetscSection for existing data layout
969 - originalVec - The existing data in a local vector
970 
971   Output Parameters:
+ newSection - The PetscSection describing the new data layout
973 - newVec - The new data in a local vector
974 
975   Level: developer
976 
977 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
978 @*/
979 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
980 {
981   PetscSF      fieldSF;
982   PetscInt    *remoteOffsets, fieldSize;
983   PetscScalar *originalValues, *newValues;
984 
985   PetscFunctionBegin;
986   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
987   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
988 
989   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
990   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
991   PetscCall(VecSetType(newVec, dm->vectype));
992 
993   PetscCall(VecGetArray(originalVec, &originalValues));
994   PetscCall(VecGetArray(newVec, &newValues));
995   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
996   PetscCall(PetscFree(remoteOffsets));
997   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
998   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
999   PetscCall(PetscSFDestroy(&fieldSF));
1000   PetscCall(VecRestoreArray(newVec, &newValues));
1001   PetscCall(VecRestoreArray(originalVec, &originalValues));
1002   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1003   PetscFunctionReturn(PETSC_SUCCESS);
1004 }
1005 
1006 /*@
1007   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1008 
1009   Collective on dm
1010 
1011   Input Parameters:
1012 + dm - The DMPlex object
1013 . pointSF - The PetscSF describing the communication pattern
1014 . originalSection - The PetscSection for existing data layout
1015 - originalIS - The existing data
1016 
1017   Output Parameters:
+ newSection - The PetscSection describing the new data layout
1019 - newIS - The new data
1020 
1021   Level: developer
1022 
1023 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1024 @*/
1025 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1026 {
1027   PetscSF         fieldSF;
1028   PetscInt       *newValues, *remoteOffsets, fieldSize;
1029   const PetscInt *originalValues;
1030 
1031   PetscFunctionBegin;
1032   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1033   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1034 
1035   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1036   PetscCall(PetscMalloc1(fieldSize, &newValues));
1037 
1038   PetscCall(ISGetIndices(originalIS, &originalValues));
1039   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1040   PetscCall(PetscFree(remoteOffsets));
1041   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1042   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1043   PetscCall(PetscSFDestroy(&fieldSF));
1044   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1045   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1046   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1047   PetscFunctionReturn(PETSC_SUCCESS);
1048 }
1049 
1050 /*@
1051   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1052 
1053   Collective on dm
1054 
1055   Input Parameters:
1056 + dm - The DMPlex object
1057 . pointSF - The PetscSF describing the communication pattern
1058 . originalSection - The PetscSection for existing data layout
1059 . datatype - The type of data
1060 - originalData - The existing data
1061 
1062   Output Parameters:
1063 + newSection - The PetscSection describing the new data layout
1064 - newData - The new data
1065 
1066   Level: developer
1067 
1068 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1069 @*/
1070 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1071 {
1072   PetscSF     fieldSF;
1073   PetscInt   *remoteOffsets, fieldSize;
1074   PetscMPIInt dataSize;
1075 
1076   PetscFunctionBegin;
1077   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1078   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1079 
1080   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1081   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1082   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1083 
1084   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1085   PetscCall(PetscFree(remoteOffsets));
1086   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1087   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1088   PetscCall(PetscSFDestroy(&fieldSF));
1089   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1090   PetscFunctionReturn(PETSC_SUCCESS);
1091 }
1092 
/* Distribute the cone section, cone points, and cone orientations of 'dm' into
   'dmParallel' following 'migrationSF', renumbering cone points via the
   'original' (local-to-global) and 'renumbering' (global-to-local) mappings,
   then symmetrize and stratify the parallel mesh. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    PetscInt numCones;

    /* Translate cone points to global numbering before communicating */
    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* No mapping given: the cone points are already global */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points back to the new local numbering in place */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* IS_GTOLM_MASK leaves -1 for points missing from the renumbering; any
       negative entry means a cone point was not migrated */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations need no renumbering; reuse the same dof-level SF */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Carry the adjacency configuration over to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1175 
1176 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1177 {
1178   MPI_Comm         comm;
1179   DM               cdm, cdmParallel;
1180   PetscSection     originalCoordSection, newCoordSection;
1181   Vec              originalCoordinates, newCoordinates;
1182   PetscInt         bs;
1183   const char      *name;
1184   const PetscReal *maxCell, *Lstart, *L;
1185 
1186   PetscFunctionBegin;
1187   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1188   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1189 
1190   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1191   PetscCall(DMGetCoordinateDM(dm, &cdm));
1192   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1193   PetscCall(DMCopyDisc(cdm, cdmParallel));
1194   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1195   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1196   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1197   if (originalCoordinates) {
1198     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1199     PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1200     PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1201 
1202     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1203     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1204     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1205     PetscCall(VecSetBlockSize(newCoordinates, bs));
1206     PetscCall(VecDestroy(&newCoordinates));
1207   }
1208 
1209   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1210   PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1211   PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1212   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1213   if (cdm) {
1214     PetscCall(DMClone(dmParallel, &cdmParallel));
1215     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1216     PetscCall(DMCopyDisc(cdm, cdmParallel));
1217     PetscCall(DMDestroy(&cdmParallel));
1218     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1219     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1220     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1221     if (originalCoordinates) {
1222       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1223       PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1224       PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1225 
1226       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1227       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1228       PetscCall(VecSetBlockSize(newCoordinates, bs));
1229       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1230       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1231       PetscCall(VecDestroy(&newCoordinates));
1232     }
1233     PetscCall(PetscSectionDestroy(&newCoordSection));
1234   }
1235   PetscFunctionReturn(PETSC_SUCCESS);
1236 }
1237 
/* Distribute every label of 'dm' onto 'dmParallel' following 'migrationSF'.
   The "depth" label is normally recreated by stratification and skipped; it is
   only communicated if its state shows the user modified it. Collective. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  /* Compare against the state recorded at the last stratify; any rank seeing a
     change forces everyone to send the depth label */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Drop the automatically created depth label so the distributed one wins */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Rank 0 decides whether label l is the depth label */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      /* Empty ranks (depth < 0) are exempt; others must agree with the maximum */
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1316 
/* Distribute the reference tree and the parent/childID arrays describing mesh
   hierarchy from 'dm' to 'dmParallel' following 'migrationSF'. Parent points
   are renumbered via 'original' (local-to-global) and 'renumbering'
   (global-to-local), mirroring the cone renumbering in DMPlexDistributeCones. */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    /* Distribute the parent section, then build a dof-level SF over it */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      PetscInt numParents;

      /* Translate parent points to global numbering before communicating */
      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      /* No mapping given: parent points are already global */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    /* Child IDs need no renumbering */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received global parent points back to the new local numbering */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* IS_GTOLM_MASK leaves -1 for unmapped points; any negative entry means
         a parent point was not migrated */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1392 
1393 /*@
1394   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1395 
1396   Input Parameters:
1397 + dm - The DMPlex object
1398 - flg - Balance the partition?
1399 
1400   Level: intermediate
1401 
1402 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1403 @*/
1404 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1405 {
1406   DM_Plex *mesh = (DM_Plex *)dm->data;
1407 
1408   PetscFunctionBegin;
1409   mesh->partitionBalance = flg;
1410   PetscFunctionReturn(PETSC_SUCCESS);
1411 }
1412 
1413 /*@
1414   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1415 
1416   Input Parameter:
1417 . dm - The DMPlex object
1418 
1419   Output Parameter:
1420 . flg - Balance the partition?
1421 
1422   Level: intermediate
1423 
1424 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1425 @*/
1426 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1427 {
1428   DM_Plex *mesh = (DM_Plex *)dm->data;
1429 
1430   PetscFunctionBegin;
1431   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1432   PetscValidBoolPointer(flg, 2);
1433   *flg = mesh->partitionBalance;
1434   PetscFunctionReturn(PETSC_SUCCESS);
1435 }
1436 
1437 typedef struct {
1438   PetscInt vote, rank, index;
1439 } Petsc3Int;
1440 
1441 /* MaxLoc, but carry a third piece of information around */
1442 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1443 {
1444   Petsc3Int *a = (Petsc3Int *)inout_;
1445   Petsc3Int *b = (Petsc3Int *)in_;
1446   PetscInt   i, len = *len_;
1447   for (i = 0; i < len; i++) {
1448     if (a[i].vote < b[i].vote) {
1449       a[i].vote  = b[i].vote;
1450       a[i].rank  = b[i].rank;
1451       a[i].index = b[i].index;
1452     } else if (a[i].vote <= b[i].vote) {
1453       if (a[i].rank >= b[i].rank) {
1454         a[i].rank  = b[i].rank;
1455         a[i].index = b[i].index;
1456       }
1457     }
1458   }
1459 }
1460 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Notes:
  Output pointSF is guaranteed to have the array of local indices (leaves) sorted.

  Level: developer

.seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                    /* random shift table; only created when balancing */
  const PetscInt     numShifts  = 13759;        /* number of entries in the shift table */
  const PetscScalar *shift      = NULL;         /* non-NULL exactly when shifts was created */
  const PetscBool    shiftDebug = PETSC_FALSE;  /* compile-time switch for verbose vote tracing */
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    /* Decide ownership of shared points via an SF reduction: each leaf submits a vote
       and the winning (rank, leaf index) pair is recorded at the root. */
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
                                          (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
      leafVote[p].rank  = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Petsc3Int is three PetscInts, so a contiguous type of 3 MPIU_INT matches its layout */
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    /* The winning (rank, index) pair at each root identifies the owner */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank  = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: the current owner (this rank, at each root) keeps its points */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank  = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
    }
  }
  /* Tell every leaf who owns its point: broadcast the (rank, index) pairs back down */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));

  /* Leaves owned by another rank become the leaves of the new point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx]  = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  /* pointLocal/pointRemote ownership passes to the SF */
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1591 
1592 /*@C
1593   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1594 
1595   Collective on dm
1596 
1597   Input Parameters:
1598 + dm       - The source DMPlex object
1599 - sf       - The star forest communication context describing the migration pattern
1600 
1601   Output Parameter:
1602 . targetDM - The target DMPlex object
1603 
1604   Level: intermediate
1605 
1606 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1607 @*/
1608 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1609 {
1610   MPI_Comm               comm;
1611   PetscInt               dim, cdim, nroots;
1612   PetscSF                sfPoint;
1613   ISLocalToGlobalMapping ltogMigration;
1614   ISLocalToGlobalMapping ltogOriginal = NULL;
1615 
1616   PetscFunctionBegin;
1617   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1618   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1619   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1620   PetscCall(DMGetDimension(dm, &dim));
1621   PetscCall(DMSetDimension(targetDM, dim));
1622   PetscCall(DMGetCoordinateDim(dm, &cdim));
1623   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1624 
1625   /* Check for a one-to-all distribution pattern */
1626   PetscCall(DMGetPointSF(dm, &sfPoint));
1627   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1628   if (nroots >= 0) {
1629     IS        isOriginal;
1630     PetscInt  n, size, nleaves;
1631     PetscInt *numbering_orig, *numbering_new;
1632 
1633     /* Get the original point numbering */
1634     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1635     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1636     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1637     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1638     /* Convert to positive global numbers */
1639     for (n = 0; n < size; n++) {
1640       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1641     }
1642     /* Derive the new local-to-global mapping from the old one */
1643     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1644     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1645     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1646     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1647     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1648     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1649     PetscCall(ISDestroy(&isOriginal));
1650   } else {
1651     /* One-to-all distribution pattern: We can derive LToG from SF */
1652     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1653   }
1654   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1655   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1656   /* Migrate DM data to target DM */
1657   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1658   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1659   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1660   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1661   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1662   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1663   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1664   PetscFunctionReturn(PETSC_SUCCESS);
1665 }
1666 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfMigration, sfStratified, sfPoint;
  PetscBool        flg, balance;
  PetscMPIInt      rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmParallel, 4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Uniprocessor: nothing to distribute, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
  {
    /* Convert partition to DMLabel */
    IS              is;
    PetscHSetI      ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: collect the set of target ranks that receive any points */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure(): label each rank's cells plus their closures */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));

  /* Invert the partition label (receiver's view) and build the migration SF, stratified by depth */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    /* oldRemote points at permRemote here, so this frees the permuted copy, not the SF's array */
    if (oldLeaves) PetscCall(PetscFree(oldRemote));
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
    PetscCall(DMGetLocalSection(dm, &section));

    /* First DM with useNatural = PETSC_TRUE is considered natural */
    /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
    /* Compose with a previous sfNatural if present */
    if (dm->sfNatural) {
      PetscSF natSF;

      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
      PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
      PetscCall(PetscSFDestroy(&natSF));
    } else {
      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    }
    /* Compose with a previous sfMigration if present */
    if (dm->sfMigration) {
      PetscSF naturalPointSF;

      PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
      PetscCall(PetscSFDestroy(&sfMigration));
      sfMigration = naturalPointSF;
    }
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: hand the migration SF to the caller if requested, otherwise release it */
  if (sf) {
    *sf = sfMigration;
  } else PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1869 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions (the same on all ranks)

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Notes:
  If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Options Database Keys:
+ -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
. -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
. -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
- -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap

  Level: advanced

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  DM_Plex     *mesh = (DM_Plex *)dm->data;
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DM           dmCoord;
  DMLabel      lblOverlap;
  PetscSF      sfOverlap, sfStratified, sfPoint;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Uniprocessor: no overlap to build, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  {
    // We need to get options for the _already_distributed mesh, so it must be done here
    /* NOTE(review): this inner 'overlap' shadows the function argument and its option-updated
       value is discarded when the scope ends; DMPlexCreateOverlapLabel() below still uses the
       argument. The options presumably act through mesh->numOvLabels etc. set by
       DMSetFromOptions_Overlap_Plex() -- confirm intended. */
    PetscInt    overlap;
    const char *prefix;
    char        oldPrefix[PETSC_MAX_PATH_LEN];

    oldPrefix[0] = '\0';
    /* Temporarily append "dist_" to the prefix so the overlap options are namespaced */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    PetscCall(PetscStrcpy(oldPrefix, prefix));
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
    PetscCall(DMPlexGetOverlap(dm, &overlap));
    PetscObjectOptionsBegin((PetscObject)dm);
    PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
    PetscOptionsEnd();
    /* Restore the original prefix */
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
  }
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  /* Compute point overlap with neighbouring processes on the distributed DM */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  /* Use label-driven overlap when the user configured overlap labels, otherwise the width-based one */
  if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  /* Convert overlap label to stratified migration SF */
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
  PetscCall(PetscSFDestroy(&sfOverlap));
  sfOverlap = sfStratified;
  PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
  PetscCall(PetscSFSetFromOptions(sfOverlap));

  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));

  /* Build the overlapping DM */
  PetscCall(DMPlexCreate(comm, dmOverlap));
  PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
  /* Store the overlap in the new DM */
  PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
  /* Build the new point SF (no ownership vote: existing owners keep their points) */
  PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Cleanup overlap partition */
  PetscCall(DMLabelDestroy(&lblOverlap));
  if (sf) *sf = sfOverlap;
  else PetscCall(PetscSFDestroy(&sfOverlap));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1981 
1982 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1983 {
1984   DM_Plex *mesh = (DM_Plex *)dm->data;
1985 
1986   PetscFunctionBegin;
1987   *overlap = mesh->overlap;
1988   PetscFunctionReturn(PETSC_SUCCESS);
1989 }
1990 
1991 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
1992 {
1993   DM_Plex *mesh    = NULL;
1994   DM_Plex *meshSrc = NULL;
1995 
1996   PetscFunctionBegin;
1997   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
1998   mesh          = (DM_Plex *)dm->data;
1999   mesh->overlap = overlap;
2000   if (dmSrc) {
2001     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2002     meshSrc = (DM_Plex *)dmSrc->data;
2003     mesh->overlap += meshSrc->overlap;
2004   }
2005   PetscFunctionReturn(PETSC_SUCCESS);
2006 }
2007 
2008 /*@
2009   DMPlexGetOverlap - Get the width of the cell overlap
2010 
2011   Not collective
2012 
2013   Input Parameter:
2014 . dm   - The DM
2015 
2016   Output Parameter:
2017 . overlap - the width of the cell overlap
2018 
2019   Level: intermediate
2020 
2021 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2022 @*/
2023 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2024 {
2025   PetscFunctionBegin;
2026   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2027   PetscValidIntPointer(overlap, 2);
2028   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2029   PetscFunctionReturn(PETSC_SUCCESS);
2030 }
2031 
2032 /*@
2033   DMPlexSetOverlap - Set the width of the cell overlap
2034 
2035   Logically collective
2036 
2037   Input Parameters:
2038 + dm      - The DM
2039 . dmSrc   - The DM that produced this one, or NULL
2040 - overlap - the width of the cell overlap
2041 
2042   Note:
2043   The overlap from dmSrc is added to dm
2044 
2045   Level: intermediate
2046 
2047 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2048 @*/
2049 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2050 {
2051   PetscFunctionBegin;
2052   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2053   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2054   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2055   PetscFunctionReturn(PETSC_SUCCESS);
2056 }
2057 
2058 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2059 {
2060   DM_Plex *mesh = (DM_Plex *)dm->data;
2061 
2062   PetscFunctionBegin;
2063   mesh->distDefault = dist;
2064   PetscFunctionReturn(PETSC_SUCCESS);
2065 }
2066 
2067 /*@
2068   DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default
2069 
2070   Logically collective
2071 
2072   Input Parameters:
2073 + dm   - The DM
2074 - dist - Flag for distribution
2075 
2076   Level: intermediate
2077 
2078 .seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2079 @*/
2080 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2081 {
2082   PetscFunctionBegin;
2083   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2084   PetscValidLogicalCollectiveBool(dm, dist, 2);
2085   PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2086   PetscFunctionReturn(PETSC_SUCCESS);
2087 }
2088 
2089 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2090 {
2091   DM_Plex *mesh = (DM_Plex *)dm->data;
2092 
2093   PetscFunctionBegin;
2094   *dist = mesh->distDefault;
2095   PetscFunctionReturn(PETSC_SUCCESS);
2096 }
2097 
2098 /*@
2099   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2100 
2101   Not collective
2102 
2103   Input Parameter:
2104 . dm   - The DM
2105 
2106   Output Parameter:
2107 . dist - Flag for distribution
2108 
2109   Level: intermediate
2110 
2111 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2112 @*/
2113 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2114 {
2115   PetscFunctionBegin;
2116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2117   PetscValidBoolPointer(dist, 2);
2118   PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2119   PetscFunctionReturn(PETSC_SUCCESS);
2120 }
2121 
2122 /*@C
2123   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2124   root process of the original's communicator.
2125 
2126   Collective on dm
2127 
2128   Input Parameter:
2129 . dm - the original DMPlex object
2130 
2131   Output Parameters:
2132 + sf - the PetscSF used for point distribution (optional)
2133 - gatherMesh - the gathered DM object, or NULL
2134 
2135   Level: intermediate
2136 
2137 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2138 @*/
2139 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2140 {
2141   MPI_Comm         comm;
2142   PetscMPIInt      size;
2143   PetscPartitioner oldPart, gatherPart;
2144 
2145   PetscFunctionBegin;
2146   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2147   PetscValidPointer(gatherMesh, 3);
2148   *gatherMesh = NULL;
2149   if (sf) *sf = NULL;
2150   comm = PetscObjectComm((PetscObject)dm);
2151   PetscCallMPI(MPI_Comm_size(comm, &size));
2152   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2153   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2154   PetscCall(PetscObjectReference((PetscObject)oldPart));
2155   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2156   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2157   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2158   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2159 
2160   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2161   PetscCall(PetscPartitionerDestroy(&gatherPart));
2162   PetscCall(PetscPartitionerDestroy(&oldPart));
2163   PetscFunctionReturn(PETSC_SUCCESS);
2164 }
2165 
2166 /*@C
2167   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
2168 
2169   Collective on dm
2170 
2171   Input Parameter:
2172 . dm - the original DMPlex object
2173 
2174   Output Parameters:
2175 + sf - the PetscSF used for point distribution (optional)
2176 - redundantMesh - the redundant DM object, or NULL
2177 
2178   Level: intermediate
2179 
2180 .seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2181 @*/
2182 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
2183 {
2184   MPI_Comm     comm;
2185   PetscMPIInt  size, rank;
2186   PetscInt     pStart, pEnd, p;
2187   PetscInt     numPoints = -1;
2188   PetscSF      migrationSF, sfPoint, gatherSF;
2189   DM           gatherDM, dmCoord;
2190   PetscSFNode *points;
2191 
2192   PetscFunctionBegin;
2193   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2194   PetscValidPointer(redundantMesh, 3);
2195   *redundantMesh = NULL;
2196   comm           = PetscObjectComm((PetscObject)dm);
2197   PetscCallMPI(MPI_Comm_size(comm, &size));
2198   if (size == 1) {
2199     PetscCall(PetscObjectReference((PetscObject)dm));
2200     *redundantMesh = dm;
2201     if (sf) *sf = NULL;
2202     PetscFunctionReturn(PETSC_SUCCESS);
2203   }
2204   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2205   if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
2206   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2207   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2208   numPoints = pEnd - pStart;
2209   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2210   PetscCall(PetscMalloc1(numPoints, &points));
2211   PetscCall(PetscSFCreate(comm, &migrationSF));
2212   for (p = 0; p < numPoints; p++) {
2213     points[p].index = p;
2214     points[p].rank  = 0;
2215   }
2216   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2217   PetscCall(DMPlexCreate(comm, redundantMesh));
2218   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2219   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2220   /* This is to express that all point are in overlap */
2221   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
2222   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2223   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2224   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2225   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2226   PetscCall(PetscSFDestroy(&sfPoint));
2227   if (sf) {
2228     PetscSF tsf;
2229 
2230     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2231     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2232     PetscCall(PetscSFDestroy(&tsf));
2233   }
2234   PetscCall(PetscSFDestroy(&migrationSF));
2235   PetscCall(PetscSFDestroy(&gatherSF));
2236   PetscCall(DMDestroy(&gatherDM));
2237   PetscCall(DMCopyDisc(dm, *redundantMesh));
2238   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2239   PetscFunctionReturn(PETSC_SUCCESS);
2240 }
2241 
2242 /*@
2243   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2244 
2245   Collective
2246 
2247   Input Parameter:
2248 . dm      - The DM object
2249 
2250   Output Parameter:
2251 . distributed - Flag whether the DM is distributed
2252 
2253   Level: intermediate
2254 
2255   Notes:
2256   This currently finds out whether at least two ranks have any DAG points.
2257   This involves MPI_Allreduce() with one integer.
2258   The result is currently not stashed so every call to this routine involves this global communication.
2259 
2260 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2261 @*/
2262 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2263 {
2264   PetscInt    pStart, pEnd, count;
2265   MPI_Comm    comm;
2266   PetscMPIInt size;
2267 
2268   PetscFunctionBegin;
2269   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2270   PetscValidBoolPointer(distributed, 2);
2271   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2272   PetscCallMPI(MPI_Comm_size(comm, &size));
2273   if (size == 1) {
2274     *distributed = PETSC_FALSE;
2275     PetscFunctionReturn(PETSC_SUCCESS);
2276   }
2277   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2278   count = (pEnd - pStart) > 0 ? 1 : 0;
2279   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2280   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2281   PetscFunctionReturn(PETSC_SUCCESS);
2282 }
2283 
2284 /*@C
2285   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2286 
2287   Input Parameters:
2288 + dm   - The DM
2289 - name - The name of the specific parallel distribution
2290 
2291   Note:
2292   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2293   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2294   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2295   loads the parallel distribution stored in file under this name.
2296 
2297   Level: developer
2298 
2299 .seealso: `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2300 @*/
2301 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2302 {
2303   DM_Plex *mesh = (DM_Plex *)dm->data;
2304 
2305   PetscFunctionBegin;
2306   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2307   if (name) PetscValidCharPointer(name, 2);
2308   PetscCall(PetscFree(mesh->distributionName));
2309   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2310   PetscFunctionReturn(PETSC_SUCCESS);
2311 }
2312 
2313 /*@C
2314   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2315 
2316   Input Parameter:
2317 . dm - The DM
2318 
2319   Output Parameter:
2320 . name - The name of the specific parallel distribution
2321 
2322   Note:
2323   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2324   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2325   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2326   loads the parallel distribution stored in file under this name.
2327 
2328   Level: developer
2329 
2330 .seealso: `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2331 @*/
2332 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2333 {
2334   DM_Plex *mesh = (DM_Plex *)dm->data;
2335 
2336   PetscFunctionBegin;
2337   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2338   PetscValidPointer(name, 2);
2339   *name = mesh->distributionName;
2340   PetscFunctionReturn(PETSC_SUCCESS);
2341 }
2342