/* xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision ecceeb7d86a3b9d2c0da2aced471d46acf67b452) */
#include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
#include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/

/*@C
  DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback

  Input Parameters:
+ dm   - The `DM` object
. user - The user callback, may be `NULL` (to clear the callback)
- ctx  - context for callback evaluation, may be `NULL`

  Level: advanced

  Notes:
  The caller of `DMPlexGetAdjacency()` may need to arrange that a large enough array is available for the adjacency.

  Any setting here overrides other configuration of `DMPLEX` adjacency determination.

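  Example Usage:
  A minimal sketch of a callback (the name `MyAdjacency` is illustrative, not part of PETSc). On input `*adjSize` holds the capacity of `adj`; on output it holds the number of entries written:
.vb
  static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
  {
    PetscFunctionBegin;
    PetscCheck(*adjSize >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
    adj[0]   = p;
    *adjSize = 1;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL));
.ve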

.seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
@*/
PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useradjacency    = user;
  mesh->useradjacencyctx = ctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMPlexGetAdjacencyUser - Get the user-defined adjacency callback

  Input Parameter:
. dm - The `DM` object

  Output Parameters:
+ user - The callback
- ctx  - context for callback evaluation

  Level: advanced

.seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm         - The `DM` object
- useAnchors - Flag to use the constraints.  If `PETSC_TRUE`, then constrained points are omitted from `DMPlexGetAdjacency()`, and their anchor points appear in their place.

  Level: intermediate

.seealso: `DMPLEX`, `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm - The `DM` object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If `PETSC_TRUE`, then constrained points are omitted from `DMPlexGetAdjacency()`, and their anchor points appear in their place.

  Level: intermediate

.seealso: `DMPLEX`, `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone   = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
  PetscCall(DMPlexGetCone(dm, p, &cone));
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt  point   = !c ? p : cone[c - 1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
    PetscCall(DMPlexGetSupport(dm, point, &support));
    for (s = 0; s < supportSize; ++s) {
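      /* Linear search of adj[] for support[s]; if the scan runs past numAdj (not found), the comma expression in the loop condition appends support[s] and terminates the loop */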
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
        if (support[s] == adj[q]) break;
      }
      PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
  PetscCall(DMPlexGetSupport(dm, p, &support));
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt  point = !s ? p : support[s - 1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
    PetscCall(DMPlexGetCone(dm, point, &cone));
    for (c = 0; c < coneSize; ++c) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt *star   = NULL;
  PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
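  /* Transitive closure arrays interleave (point, orientation) pairs, hence the stride-2 loops below */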
  for (s = 0; s < starSize * 2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
    for (c = 0; c < closureSize * 2; c += 2) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
    PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
  }
  PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
  *adjSize = numAdj;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz       = 0;
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

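    /* Worst-case allocation: geometric series bound the sizes of cones/supports iterated to the mesh depth, scaled by the maximum anchor fan-out and clipped to the chart size */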
    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
    asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
    asiz *= maxAnchors;
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

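    /* Walk adj[], replacing each anchored (constrained) point by its anchor points, deduplicating as entries are appended */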
    while (i < origSize) {
      PetscInt p    = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The `DM` object
- p  - The point

  Input/Output Parameters:
+ adjSize - The maximum size of `adj` if it is non-`NULL`, or `PETSC_DETERMINE`;
            on output the number of adjacent points
- adj     - Either `NULL` so that the array is allocated, or an existing array with size `adjSize`;
            on output contains the adjacent points

  Level: advanced

  Notes:
  The user must `PetscFree()` the `adj` array if it was not passed in.

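  Example Usage:
  A sketch that lets the routine allocate the array (assuming `p` is a valid mesh point in `dm`):
.vb
  PetscInt *adj = NULL, adjSize = PETSC_DETERMINE;

  PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
  for (PetscInt a = 0; a < adjSize; ++a) PetscCall(PetscPrintf(PETSC_COMM_SELF, "%" PetscInt_FMT " is adjacent to %" PetscInt_FMT "\n", p, adj[a]));
  PetscCall(PetscFree(adj));
.ve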

.seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  PetscBool useCone, useClosure, useAnchors;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(adjSize, 3);
  PetscAssertPointer(adj, 4);
  PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
  PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
  PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexCreateTwoSidedProcessSF - Create a `PetscSF` which just has process connectivity

  Collective

  Input Parameters:
+ dm              - The `DM`
. sfPoint         - The `PetscSF` which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or `NULL`
- sfProcess    - A `PetscSF` encoding the two-sided process connectivity, or `NULL`

  Level: developer

.seealso: `DMPLEX`, `PetscSFCreate()`, `DMPlexCreateProcessSF()`
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) PetscAssertPointer(processRanks, 7);
  if (sfProcess) PetscAssertPointer(sfProcess, 8);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
  PetscCall(PetscBTCreate(size, &neighbors));
  PetscCall(PetscBTMemzero(size, neighbors));
  /* Compute root-to-leaf process connectivity */
  PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(rootRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(rootRanks, &nranks));
  /* Compute leaf-to-neighbor process connectivity */
  PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(leafRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(leafRanks, &nranks));
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
  /* Calculate edges */
  PetscCall(PetscBTClear(neighbors, rank));
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
  }
  PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
  PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
  PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  PetscCall(PetscBTDestroy(&neighbors));
  if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
  else PetscCall(PetscFree(ranksNew));
  if (sfProcess) {
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
    PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
    PetscCall(PetscSFSetFromOptions(*sfProcess));
    PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This provides a two-sided description of the one-sided point `PetscSF`.

  Collective

  Input Parameter:
. dm - The `DM`

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

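  Example Usage:
  A sketch of the typical call sequence when building an overlap (assuming `comm` is the communicator of `dm`; the caller is responsible for destroying the sections, index sets, and label):
.vb
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DMLabel      ovLabel;

  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  PetscCall(DMPlexCreateOverlapLabel(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel));
.ve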

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes

  Collective

  Input Parameters:
+ dm          - The `DM`
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          flg;

  PetscFunctionBegin;
  *ovLabel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetPointSF(dm, &sfPoint));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  if (!levels) {
    /* Add owned points */
    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
    for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
    for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
  }
  PetscCall(ISGetIndices(rootrank, &rrank));
  PetscCall(ISGetIndices(leafrank, &nrank));
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
      if (neighbors) {
        PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
        PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff + n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
        }
      }
    }
    /* Roots are shared with leaves */
    PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
    if (!neighbors) continue;
    PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
    PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff + n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
    }
  }
  PetscCall(PetscFree(adj));
  PetscCall(ISRestoreIndices(rootrank, &rrank));
  PetscCall(ISRestoreIndices(leafrank, &nrank));
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
    /* Add next level of point donations to the label */
    PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
  }
  /* We require the closure in the overlap */
  PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
  if (flg) {
    PetscViewer viewer;
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
    PetscCall(DMLabelView(ovAdjByRank, viewer));
  }
  /* Invert sender to receiver label */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
  PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
  for (l = 0; l < nleaves; ++l) {
    PetscCall(DMLabelClearValue(*ovLabel, local ? local[l] : l, rank)); /* local may be NULL for contiguous leaf storage */
    PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
  }
  /* Clean up */
  PetscCall(DMLabelDestroy(&ovAdjByRank));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
{
  PetscInt neighbors, el;

  PetscFunctionBegin;
  PetscCall(PetscSectionGetDof(section, p, &neighbors));
  if (neighbors) {
    PetscInt   *adj     = NULL;
    PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    PetscCall(PetscSectionGetOffset(section, p, &noff));
    PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = ranks[noff + n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {
        PetscBool insert = PETSC_TRUE;

        for (el = 0; el < numExLabels; ++el) {
          PetscInt exVal;
          PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
          if (exVal == exValue[el]) {
            insert = PETSC_FALSE;
            break;
          }
        }
        if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
      }
    }
    PetscCall(PetscFree(adj));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes

  Collective

  Input Parameters:
+ dm          - The `DM`
. numLabels   - The number of labels to draw candidate points from
. label       - An array of labels containing candidate points
. value       - An array of label values marking the candidate points
. numExLabels - The number of labels to use for exclusion
. exLabel     - An array of labels indicating points to be excluded, or `NULL`
. exValue     - An array of label values to be excluded, or `NULL`
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings

  Level: developer

  Note:
  The candidate points are only added to the overlap if they are adjacent to a shared point

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
  PetscMPIInt        rank, size;
  PetscBool          flg;

  PetscFunctionBegin;
  *ovLabel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetPointSF(dm, &sfPoint));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
  PetscCall(ISGetIndices(rootrank, &rrank));
  PetscCall(ISGetIndices(leafrank, &nrank));
  for (l = 0; l < numLabels; ++l) {
    IS              valIS;
    const PetscInt *points;
    PetscInt        n;

    PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
    if (!valIS) continue;
    PetscCall(ISGetIndices(valIS, &points));
    PetscCall(ISGetLocalSize(valIS, &n));
    for (PetscInt i = 0; i < n; ++i) {
      const PetscInt p = points[i];

      if ((p >= sStart) && (p < sEnd)) {
        PetscInt loc, adjSize = PETSC_DETERMINE;

        /* Handle leaves: shared with the root point */
        if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
        else loc = (p >= 0 && p < nleaves) ? p : -1;
        if (loc >= 0) {
          const PetscInt remoteRank = remote[loc].rank;

          PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
          for (PetscInt a = 0; a < adjSize; ++a) {
            PetscBool insert = PETSC_TRUE;

            for (el = 0; el < numExLabels; ++el) {
              PetscInt exVal;
              PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
              if (exVal == exValue[el]) {
                insert = PETSC_FALSE;
                break;
              }
            }
            if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
          }
        }
        /* Some leaves share a root with other leaves on different processes */
        PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
      }
      /* Roots are shared with leaves */
      PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
    }
    PetscCall(ISRestoreIndices(valIS, &points));
    PetscCall(ISDestroy(&valIS));
  }
  PetscCall(PetscFree(adj));
  PetscCall(ISRestoreIndices(rootrank, &rrank));
  PetscCall(ISRestoreIndices(leafrank, &nrank));
  /* We require the closure in the overlap */
  PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
  if (flg) {
    PetscViewer viewer;
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
    PetscCall(DMLabelView(ovAdjByRank, viewer));
  }
  /* Invert sender to receiver label */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
  PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
  for (l = 0; l < nleaves; ++l) {
    PetscCall(DMLabelClearValue(*ovLabel, local ? local[l] : l, rank)); /* local may be NULL for contiguous leaf storage */
    PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
  }
  /* Clean up */
  PetscCall(DMLabelDestroy(&ovAdjByRank));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMPlexCreateOverlapMigrationSF - Create a `PetscSF` describing the new mesh distribution to make the overlap described by the input `PetscSF`

  Collective

  Input Parameters:
+ dm        - The `DM`
- overlapSF - The `PetscSF` mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - A `PetscSF` that maps original points in old locations to points in new locations

  Level: developer

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
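  /* Received points are appended after the local points of each stratum, in the order cells, vertices, faces, edges (cf. the ordering used in DMPlexStratifyMigrationSF) */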
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration `PetscSF` for stratification.

  Collective

  Input Parameters:
+ dm - The `DM`
- sf - A star forest with non-ordered leaves, usually defining a `DM` point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting `DM` is stratified

  Level: developer

  Note:
  This lexicographically sorts by (depth, cellType)

.seealso: `DMPLEX`, `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i];

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;

    if ((PetscInt)ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexDistributeField - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution

  Collective

  Input Parameters:
+ dm              - The `DMPLEX` object
. pointSF         - The `PetscSF` describing the communication pattern
. originalSection - The `PetscSection` for existing data layout
- originalVec     - The existing data in a local vector

  Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
- newVec     - The new data in a local vector

  Level: developer

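  Example Usage:
  A sketch mirroring the way coordinates are migrated in this file; the output section and vector are created by the caller, while their sizes and layout are set here:
.vb
  PetscSection newSection;
  Vec          newVec;

  PetscCall(PetscSectionCreate(PETSC_COMM_SELF, &newSection));
  PetscCall(VecCreate(PETSC_COMM_SELF, &newVec));
  PetscCall(DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec));
.ve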

.seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF      fieldSF;
  PetscInt    *remoteOffsets, fieldSize;
  PetscScalar *originalValues, *newValues;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
  PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));

  PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
  PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
  PetscCall(VecSetType(newVec, dm->vectype));

  PetscCall(VecGetArray(originalVec, &originalValues));
  PetscCall(VecGetArray(newVec, &newValues));
  PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&fieldSF));
  PetscCall(VecRestoreArray(newVec, &newValues));
  PetscCall(VecRestoreArray(originalVec, &originalValues));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution

  Collective

  Input Parameters:
+ dm              - The `DMPLEX` object
. pointSF         - The `PetscSF` describing the communication pattern
. originalSection - The `PetscSection` for existing data layout
- originalIS      - The existing data

  Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
- newIS      - The new data

  Level: developer

.seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
  PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));

  PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
  PetscCall(PetscMalloc1(fieldSize, &newValues));

  PetscCall(ISGetIndices(originalIS, &originalValues));
  PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&fieldSF));
  PetscCall(ISRestoreIndices(originalIS, &originalValues));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMPlexDistributeData - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution

  Collective

  Input Parameters:
+ dm              - The `DMPLEX` object
. pointSF         - The `PetscSF` describing the communication pattern
. originalSection - The `PetscSection` for existing data layout
. datatype        - The type of data
- originalData    - The existing data

  Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
- newData    - The new data

  Level: developer

.seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF     fieldSF;
  PetscInt   *remoteOffsets, fieldSize;
  PetscMPIInt dataSize;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
  PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));

  PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
  PetscCallMPI(MPI_Type_size(datatype, &dataSize));
  PetscCall(PetscMalloc(fieldSize * dataSize, newData));

  PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&fieldSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
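  /* IS_GTOLM_MASK maps cone points absent from the renumbering to -1, which the debug check below reports */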
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *Lstart, *L;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetCoordinateDM(dm, &cdm));
  PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
  PetscCall(DMCopyDisc(cdm, cdmParallel));
  PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
  PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
  PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
  if (originalCoordinates) {
    PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
    PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
    PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

    PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
    PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
    PetscCall(VecGetBlockSize(originalCoordinates, &bs));
    PetscCall(VecSetBlockSize(newCoordinates, bs));
    PetscCall(VecDestroy(&newCoordinates));
  }

  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
  PetscCall(DMGetCellCoordinateDM(dm, &cdm));
  if (cdm) {
    PetscCall(DMClone(dmParallel, &cdmParallel));
    PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
    PetscCall(DMCopyDisc(cdm, cdmParallel));
    PetscCall(DMDestroy(&cdmParallel));
    PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
    PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
    PetscCall(PetscSectionCreate(comm, &newCoordSection));
    if (originalCoordinates) {
      PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
      PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
      PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

      PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
      PetscCall(VecGetBlockSize(originalCoordinates, &bs));
      PetscCall(VecSetBlockSize(newCoordinates, bs));
      PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
      PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
      PetscCall(VecDestroy(&newCoordinates));
    }
    PetscCall(PetscSectionDestroy(&newCoordSection));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

1238 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1239 {
1240   DM_Plex         *mesh = (DM_Plex *)dm->data;
1241   MPI_Comm         comm;
1242   DMLabel          depthLabel;
1243   PetscMPIInt      rank;
1244   PetscInt         depth, d, numLabels, numLocalLabels, l;
1245   PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
1246   PetscObjectState depthState = -1;
1247 
1248   PetscFunctionBegin;
1249   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1250   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1251 
1252   PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1253   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1254   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1255 
1256   /* If the user has changed the depth label, communicate it instead */
1257   PetscCall(DMPlexGetDepth(dm, &depth));
1258   PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
1259   if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
1260   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1261   PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
1262   if (sendDepth) {
1263     PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
1264     PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
1265   }
1266   /* Everyone must have either the same number of labels, or none */
1267   PetscCall(DMGetNumLabels(dm, &numLocalLabels));
1268   numLabels = numLocalLabels;
1269   PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
1270   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1271   for (l = 0; l < numLabels; ++l) {
1272     DMLabel     label = NULL, labelNew = NULL;
1273     PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
1274     const char *name = NULL;
1275 
1276     if (hasLabels) {
1277       PetscCall(DMGetLabelByNum(dm, l, &label));
1278       /* Skip "depth" because it is recreated */
1279       PetscCall(PetscObjectGetName((PetscObject)label, &name));
1280       PetscCall(PetscStrcmp(name, "depth", &isDepth));
1281     } else {
1282       isDepth = PETSC_FALSE;
1283     }
1284     PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
1285     if (isDepth && !sendDepth) continue;
1286     PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
1287     if (isDepth) {
1288       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1289       PetscInt gdepth;
1290 
1291       PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
1292       PetscCheck(depth < 0 || gdepth == depth, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
1293       for (d = 0; d <= gdepth; ++d) {
1294         PetscBool has;
1295 
1296         PetscCall(DMLabelHasStratum(labelNew, d, &has));
1297         if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
1298       }
1299     }
1300     PetscCall(DMAddLabel(dmParallel, labelNew));
1301     /* Put the output flag in the new label */
1302     if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
1303     PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
1304     PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
1305     PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
1306     PetscCall(DMLabelDestroy(&labelNew));
1307   }
1308   {
1309     DMLabel ctLabel;
1310 
1311     // Reset label for fast lookup
1312     PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
1313     PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
1314   }
1315   PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1316   PetscFunctionReturn(PETSC_SUCCESS);
1317 }
1318 
1319 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1320 {
1321   DM_Plex     *mesh  = (DM_Plex *)dm->data;
1322   DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
1323   MPI_Comm     comm;
1324   DM           refTree;
1325   PetscSection origParentSection, newParentSection;
1326   PetscInt    *origParents, *origChildIDs;
1327   PetscBool    flg;
1328 
1329   PetscFunctionBegin;
1330   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1331   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1332   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1333 
1334   /* Set up tree */
1335   PetscCall(DMPlexGetReferenceTree(dm, &refTree));
1336   PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
1337   PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
1338   if (origParentSection) {
1339     PetscInt  pStart, pEnd;
1340     PetscInt *newParents, *newChildIDs, *globParents;
1341     PetscInt *remoteOffsetsParents, newParentSize;
1342     PetscSF   parentSF;
1343 
1344     PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
1345     PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
1346     PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
1347     PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
1348     PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
1349     PetscCall(PetscFree(remoteOffsetsParents));
1350     PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
1351     PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
1352     if (original) {
1353       PetscInt numParents;
1354 
1355       PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
1356       PetscCall(PetscMalloc1(numParents, &globParents));
1357       PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
1358     } else {
1359       globParents = origParents;
1360     }
1361     PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1362     PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1363     if (original) PetscCall(PetscFree(globParents));
1364     PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1365     PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1366     PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
1367     if (PetscDefined(USE_DEBUG)) {
1368       PetscInt  p;
1369       PetscBool valid = PETSC_TRUE;
1370       for (p = 0; p < newParentSize; ++p) {
1371         if (newParents[p] < 0) {
1372           valid = PETSC_FALSE;
1373           PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
1374         }
1375       }
1376       PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1377     }
1378     PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
1379     if (flg) {
1380       PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
1381       PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
1382       PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
1383       PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
1384       PetscCall(PetscSFView(parentSF, NULL));
1385     }
1386     PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
1387     PetscCall(PetscSectionDestroy(&newParentSection));
1388     PetscCall(PetscFree2(newParents, newChildIDs));
1389     PetscCall(PetscSFDestroy(&parentSF));
1390   }
1391   pmesh->useAnchors = mesh->useAnchors;
1392   PetscFunctionReturn(PETSC_SUCCESS);
1393 }
1394 
1395 /*@
1396   DMPlexSetPartitionBalance - Should distribution of the `DM` attempt to balance the shared point partition?
1397 
1398   Input Parameters:
1399 + dm  - The `DMPLEX` object
1400 - flg - Balance the partition?
1401 
1402   Level: intermediate
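
  Example usage, a sketch assuming `dm` is a serial `DMPLEX` about to be distributed:
.vb
  DM dmParallel = NULL;

  PetscCall(DMPlexSetPartitionBalance(dm, PETSC_TRUE));
  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmParallel));
.ve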
1403 
1404 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1405 @*/
1406 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1407 {
1408   DM_Plex *mesh = (DM_Plex *)dm->data;
1409 
1410   PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1411   mesh->partitionBalance = flg;
1412   PetscFunctionReturn(PETSC_SUCCESS);
1413 }
1414 
1415 /*@
1416   DMPlexGetPartitionBalance - Does distribution of the `DM` attempt to balance the shared point partition?
1417 
1418   Input Parameter:
1419 . dm - The `DMPLEX` object
1420 
1421   Output Parameter:
1422 . flg - Balance the partition?
1423 
1424   Level: intermediate
1425 
1426 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1427 @*/
1428 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1429 {
1430   DM_Plex *mesh = (DM_Plex *)dm->data;
1431 
1432   PetscFunctionBegin;
1433   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1434   PetscAssertPointer(flg, 2);
1435   *flg = mesh->partitionBalance;
1436   PetscFunctionReturn(PETSC_SUCCESS);
1437 }
1438 
1439 typedef struct {
1440   PetscInt vote, rank, index;
1441 } Petsc3Int;
1442 
1443 /* MaxLoc on the vote, but carry extra information around: the entry with the largest vote wins, and on ties the smaller rank wins */
1444 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1445 {
1446   Petsc3Int *a = (Petsc3Int *)inout_;
1447   Petsc3Int *b = (Petsc3Int *)in_;
1448   PetscInt   i, len = *len_;
1449   for (i = 0; i < len; i++) {
1450     if (a[i].vote < b[i].vote) {
1451       a[i].vote  = b[i].vote;
1452       a[i].rank  = b[i].rank;
1453       a[i].index = b[i].index;
1454     } else if (a[i].vote == b[i].vote) { /* tie: prefer the smaller rank */
1455       if (a[i].rank >= b[i].rank) {
1456         a[i].rank  = b[i].rank;
1457         a[i].index = b[i].index;
1458       }
1459     }
1460   }
1461 }
1462 
1463 /*@C
1464   DMPlexCreatePointSF - Build a point `PetscSF` from a `PetscSF` describing a point migration
1465 
1466   Input Parameters:
1467 + dm          - The source `DMPLEX` object
1468 . migrationSF - The star forest that describes the parallel point remapping
1469 - ownership   - Flag causing a vote to determine point ownership
1470 
1471   Output Parameter:
1472 . pointSF - The star forest describing the point overlap in the remapped `DM`
1473 
1474   Level: developer
1475 
1476   Note:
1477   The output `pointSF` is guaranteed to have its array of local indices (leaves) sorted.
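
  Example usage, a sketch mirroring the call sequence inside `DMPlexDistribute()` (`dmParallel` and `migrationSF` are assumed to come from a prior migration):
.vb
  PetscSF pointSF;

  PetscCall(DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF));
  PetscCall(DMSetPointSF(dmParallel, pointSF));
  PetscCall(PetscSFDestroy(&pointSF));
.ve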
1478 
1479 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1480 @*/
1481 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1482 {
1483   PetscMPIInt        rank, size;
1484   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1485   PetscInt          *pointLocal;
1486   const PetscInt    *leaves;
1487   const PetscSFNode *roots;
1488   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1489   Vec                shifts;
1490   const PetscInt     numShifts  = 13759;
1491   const PetscScalar *shift      = NULL;
1492   const PetscBool    shiftDebug = PETSC_FALSE;
1493   PetscBool          balance;
1494 
1495   PetscFunctionBegin;
1496   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1497   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1498   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1499   PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1500 
1501   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1502   PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1503   PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1504   if (ownership) {
1505     MPI_Op       op;
1506     MPI_Datatype datatype;
1507     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1508     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1509     if (balance) {
1510       PetscRandom r;
1511 
1512       PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1513       PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1514       PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1515       PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1516       PetscCall(VecSetType(shifts, VECSTANDARD));
1517       PetscCall(VecSetRandom(shifts, r));
1518       PetscCall(PetscRandomDestroy(&r));
1519       PetscCall(VecGetArrayRead(shifts, &shift));
1520     }
1521 
1522     PetscCall(PetscMalloc1(nroots, &rootVote));
1523     PetscCall(PetscMalloc1(nleaves, &leafVote));
1524     /* Point ownership vote: Process with highest rank owns shared points */
1525     for (p = 0; p < nleaves; ++p) {
1526       if (shiftDebug) {
1527         PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1528                                           (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1529       }
1530       /* Either put in a bid or we know we own it */
1531       leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1532       leafVote[p].rank  = rank;
1533       leafVote[p].index = p;
1534     }
1535     for (p = 0; p < nroots; p++) {
1536       /* Roots must not participate in the reduction, so flag them with values the MaxLoc reduction will never select */
1537       rootVote[p].vote  = -3;
1538       rootVote[p].rank  = -3;
1539       rootVote[p].index = -3;
1540     }
1541     PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1542     PetscCallMPI(MPI_Type_commit(&datatype));
1543     PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1544     PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1545     PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1546     PetscCallMPI(MPI_Op_free(&op));
1547     PetscCallMPI(MPI_Type_free(&datatype));
1548     for (p = 0; p < nroots; p++) {
1549       rootNodes[p].rank  = rootVote[p].rank;
1550       rootNodes[p].index = rootVote[p].index;
1551     }
1552     PetscCall(PetscFree(leafVote));
1553     PetscCall(PetscFree(rootVote));
1554   } else {
1555     for (p = 0; p < nroots; p++) {
1556       rootNodes[p].index = -1;
1557       rootNodes[p].rank  = rank;
1558     }
1559     for (p = 0; p < nleaves; p++) {
1560       /* Write new local id into old location */
1561       if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1562     }
1563   }
1564   PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1565   PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1566 
1567   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1568     if (leafNodes[p].rank != rank) npointLeaves++;
1569   }
1570   PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1571   PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1572   for (idx = 0, p = 0; p < nleaves; p++) {
1573     if (leafNodes[p].rank != rank) {
1574       /* Note that pointLocal is automatically sorted as it is a sublist of 0, ..., nleaves-1 */
1575       pointLocal[idx]  = p;
1576       pointRemote[idx] = leafNodes[p];
1577       idx++;
1578     }
1579   }
1580   if (shift) {
1581     PetscCall(VecRestoreArrayRead(shifts, &shift));
1582     PetscCall(VecDestroy(&shifts));
1583   }
1584   if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1585   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1586   PetscCall(PetscSFSetFromOptions(*pointSF));
1587   PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1588   PetscCall(PetscFree2(rootNodes, leafNodes));
1589   PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1590   if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
1591   PetscFunctionReturn(PETSC_SUCCESS);
1592 }
1593 
1594 /*@C
1595   DMPlexMigrate - Migrates internal `DM` data over the supplied star forest
1596 
1597   Collective
1598 
1599   Input Parameters:
1600 + dm - The source `DMPLEX` object
1601 - sf - The star forest communication context describing the migration pattern
1602 
1603   Output Parameter:
1604 . targetDM - The target `DMPLEX` object
1605 
1606   Level: intermediate
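
  Example usage, a minimal sketch following the pattern used by `DMPlexDistribute()` (`comm` and the migration `sf` are assumed to exist):
.vb
  DM targetDM;

  PetscCall(DMPlexCreate(comm, &targetDM));
  PetscCall(DMPlexMigrate(dm, sf, targetDM));
.ve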
1607 
1608 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1609 @*/
1610 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1611 {
1612   MPI_Comm               comm;
1613   PetscInt               dim, cdim, nroots;
1614   PetscSF                sfPoint;
1615   ISLocalToGlobalMapping ltogMigration;
1616   ISLocalToGlobalMapping ltogOriginal = NULL;
1617 
1618   PetscFunctionBegin;
1619   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1620   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1621   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1622   PetscCall(DMGetDimension(dm, &dim));
1623   PetscCall(DMSetDimension(targetDM, dim));
1624   PetscCall(DMGetCoordinateDim(dm, &cdim));
1625   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1626 
1627   /* Check for a one-to-all distribution pattern */
1628   PetscCall(DMGetPointSF(dm, &sfPoint));
1629   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1630   if (nroots >= 0) {
1631     IS        isOriginal;
1632     PetscInt  n, size, nleaves;
1633     PetscInt *numbering_orig, *numbering_new;
1634 
1635     /* Get the original point numbering */
1636     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1637     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1638     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1639     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1640     /* Convert to positive global numbers */
1641     for (n = 0; n < size; n++) {
1642       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1643     }
1644     /* Derive the new local-to-global mapping from the old one */
1645     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1646     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1647     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1648     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1649     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1650     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1651     PetscCall(ISDestroy(&isOriginal));
1652   } else {
1653     /* One-to-all distribution pattern: We can derive LToG from SF */
1654     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1655   }
1656   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1657   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1658   /* Migrate DM data to target DM */
1659   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1660   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1661   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1662   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1663   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1664   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1665   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1666   PetscFunctionReturn(PETSC_SUCCESS);
1667 }
1668 
1669 /*@
1670   DMPlexRemapMigrationSF - Rewrite the distribution SF to account for overlap
1671 
1672   Collective
1673 
1674   Input Parameters:
1675 + sfOverlap   - The `PetscSF` object just for the overlap
1676 - sfMigration - The original distribution `PetscSF` object
1677 
1678   Output Parameter:
1679 . sfMigrationNew - A rewritten `PetscSF` object that incorporates the overlap
1680 
1681   Level: developer
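
  Example usage, a sketch of the pattern used by `DMPlexDistribute()` when an overlap is requested:
.vb
  PetscSF sfOverlapPoint;

  PetscCall(DMPlexRemapMigrationSF(sfOverlap, sfMigration, &sfOverlapPoint));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfOverlapPoint;
.ve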
1682 
1683 .seealso: `DMPLEX`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`, `DMPlexGetOverlap()`
1684 @*/
1685 PetscErrorCode DMPlexRemapMigrationSF(PetscSF sfOverlap, PetscSF sfMigration, PetscSF *sfMigrationNew)
1686 {
1687   PetscSFNode       *newRemote, *permRemote;
1688   const PetscInt    *oldLeaves;
1689   const PetscSFNode *oldRemote;
1690   PetscInt           nroots, nleaves, noldleaves;
1691 
1692   PetscFunctionBegin;
1693   PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
1694   PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
1695   PetscCall(PetscMalloc1(nleaves, &newRemote));
1696   /* oldRemote: original root point mapping to original leaf point
1697      newRemote: original leaf point mapping to overlapped leaf point */
1698   if (oldLeaves) {
1699     /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1700     PetscCall(PetscMalloc1(noldleaves, &permRemote));
1701     for (PetscInt l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1702     oldRemote = permRemote;
1703   }
1704   PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1705   PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1706   if (oldLeaves) PetscCall(PetscFree(oldRemote));
1707   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)sfOverlap), sfMigrationNew));
1708   PetscCall(PetscSFSetGraph(*sfMigrationNew, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
1709   PetscFunctionReturn(PETSC_SUCCESS);
1710 }
1711 
1712 /*@C
1713   DMPlexDistribute - Distributes the mesh and any associated sections.
1714 
1715   Collective
1716 
1717   Input Parameters:
1718 + dm      - The original `DMPLEX` object
1719 - overlap - The overlap of partitions, 0 is the default
1720 
1721   Output Parameters:
1722 + sf         - The `PetscSF` used for point distribution; pass `NULL` if it is not needed
1723 - dmParallel - The distributed `DMPLEX` object
1724 
1725   Level: intermediate
1726 
1727   Notes:
1728   If the communicator of `dm` has only one rank, no distribution takes place and the output `dmParallel` is set to `NULL`.
1729 
1730   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
1731   representation on the mesh.
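
  Example usage, a minimal sketch in which the distributed mesh replaces the serial one:
.vb
  DM      dmDist = NULL;
  PetscSF sf     = NULL;

  PetscCall(DMPlexDistribute(dm, 0, &sf, &dmDist));
  if (dmDist) { // NULL when running on a single rank
    PetscCall(DMDestroy(&dm));
    dm = dmDist;
  }
  PetscCall(PetscSFDestroy(&sf));
.ve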
1732 
1733 .seealso: `DMPLEX`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1734 @*/
1735 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1736 {
1737   MPI_Comm         comm;
1738   PetscPartitioner partitioner;
1739   IS               cellPart;
1740   PetscSection     cellPartSection;
1741   DM               dmCoord;
1742   DMLabel          lblPartition, lblMigration;
1743   PetscSF          sfMigration, sfStratified, sfPoint;
1744   VecType          vec_type;
1745   PetscBool        flg, balance;
1746   PetscMPIInt      rank, size;
1747 
1748   PetscFunctionBegin;
1749   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1750   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1751   if (sf) PetscAssertPointer(sf, 3);
1752   PetscAssertPointer(dmParallel, 4);
1753 
1754   if (sf) *sf = NULL;
1755   *dmParallel = NULL;
1756   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1757   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1758   PetscCallMPI(MPI_Comm_size(comm, &size));
1759   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1760 
1761   PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
1762   /* Create cell partition */
1763   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1764   PetscCall(PetscSectionCreate(comm, &cellPartSection));
1765   PetscCall(DMPlexGetPartitioner(dm, &partitioner));
1766   PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
1767   PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
1768   {
1769     /* Convert partition to DMLabel */
1770     IS              is;
1771     PetscHSetI      ht;
1772     const PetscInt *points;
1773     PetscInt       *iranks;
1774     PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;
1775 
1776     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
1777     /* Preallocate strata */
1778     PetscCall(PetscHSetICreate(&ht));
1779     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1780     for (proc = pStart; proc < pEnd; proc++) {
1781       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1782       if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
1783     }
1784     PetscCall(PetscHSetIGetSize(ht, &nranks));
1785     PetscCall(PetscMalloc1(nranks, &iranks));
1786     PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
1787     PetscCall(PetscHSetIDestroy(&ht));
1788     PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
1789     PetscCall(PetscFree(iranks));
1790     /* Inline DMPlexPartitionLabelClosure() */
1791     PetscCall(ISGetIndices(cellPart, &points));
1792     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1793     for (proc = pStart; proc < pEnd; proc++) {
1794       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1795       if (!npoints) continue;
1796       PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
1797       PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
1798       PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
1799       PetscCall(ISDestroy(&is));
1800     }
1801     PetscCall(ISRestoreIndices(cellPart, &points));
1802   }
1803   PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));
1804 
1805   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
1806   PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
1807   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
1808   PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
1809   PetscCall(PetscSFDestroy(&sfMigration));
1810   sfMigration = sfStratified;
1811   PetscCall(PetscSFSetUp(sfMigration));
1812   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1813   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
1814   if (flg) {
1815     PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
1816     PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
1817   }
1818 
1819   /* Create non-overlapping parallel DM and migrate internal data */
1820   PetscCall(DMPlexCreate(comm, dmParallel));
1821   PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
1822   PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));
1823 
1824   /* Build the point SF without overlap */
1825   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1826   PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
1827   PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
1828   PetscCall(DMSetPointSF(*dmParallel, sfPoint));
1829   PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
1830   PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
1831   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1832   if (flg) PetscCall(PetscSFView(sfPoint, NULL));
1833 
1834   if (overlap > 0) {
1835     DM      dmOverlap;
1836     PetscSF sfOverlap, sfOverlapPoint;
1837 
1838     /* Add the partition overlap to the distributed DM */
1839     PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
1840     PetscCall(DMDestroy(dmParallel));
1841     *dmParallel = dmOverlap;
1842     if (flg) {
1843       PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
1844       PetscCall(PetscSFView(sfOverlap, NULL));
1845     }
1846 
1847     /* Re-map the migration SF to establish the full migration pattern */
1848     PetscCall(DMPlexRemapMigrationSF(sfOverlap, sfMigration, &sfOverlapPoint));
1849     PetscCall(PetscSFDestroy(&sfOverlap));
1850     PetscCall(PetscSFDestroy(&sfMigration));
1851     sfMigration = sfOverlapPoint;
1852   }
1853   /* Cleanup Partition */
1854   PetscCall(DMLabelDestroy(&lblPartition));
1855   PetscCall(DMLabelDestroy(&lblMigration));
1856   PetscCall(PetscSectionDestroy(&cellPartSection));
1857   PetscCall(ISDestroy(&cellPart));
1858   /* Copy discretization */
1859   PetscCall(DMCopyDisc(dm, *dmParallel));
1860   PetscCall(DMGetVecType(dm, &vec_type));
1861   PetscCall(DMSetVecType(*dmParallel, vec_type));
1862   /* Create sfNatural */
1863   if (dm->useNatural) {
1864     PetscSection section;
1865 
1866     PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
1867     PetscCall(DMGetLocalSection(dm, &section));
1868 
1869     /* First DM with useNatural = PETSC_TRUE is considered natural */
1870     /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
1871     /* Compose with a previous sfNatural if present */
1872     if (dm->sfNatural) {
1873       PetscSF natSF;
1874 
1875       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
1876       PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
1877       PetscCall(PetscSFDestroy(&natSF));
1878     } else {
1879       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
1880     }
1881     /* Compose with a previous sfMigration if present */
1882     if (dm->sfMigration) {
1883       PetscSF naturalPointSF;
1884 
1885       PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
1886       PetscCall(PetscSFDestroy(&sfMigration));
1887       sfMigration = naturalPointSF;
1888     }
1889   }
1890   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
1891   /* Cleanup */
1892   if (sf) {
1893     *sf = sfMigration;
1894   } else PetscCall(PetscSFDestroy(&sfMigration));
1895   PetscCall(PetscSFDestroy(&sfPoint));
1896   PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
1897   PetscFunctionReturn(PETSC_SUCCESS);
1898 }
1899 
1900 /*@C
1901   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping `DM`.
1902 
1903   Collective
1904 
1905   Input Parameters:
1906 + dm      - The non-overlapping distributed `DMPLEX` object
1907 - overlap - The overlap of partitions (the same on all ranks)
1908 
1909   Output Parameters:
1910 + sf        - The `PetscSF` used for point distribution (optional)
1911 - dmOverlap - The overlapping distributed `DMPLEX` object, or `NULL`
1912 
1913   Options Database Keys:
1914 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1915 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1916 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1917 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1918 
1919   Level: advanced
1920 
1921   Notes:
1922   If the mesh was not distributed, `dmOverlap` is set to `NULL`.
1923 
1924   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
1925   representation on the mesh.
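
  Example usage, a sketch assuming `dm` is already distributed and non-overlapping:
.vb
  DM dmOverlap = NULL;

  PetscCall(DMPlexDistributeOverlap(dm, 1, NULL, &dmOverlap));
  if (dmOverlap) {
    PetscCall(DMDestroy(&dm));
    dm = dmOverlap;
  }
.ve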
1926 
1927 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
1928 @*/
1929 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1930 {
1931   DM_Plex     *mesh = (DM_Plex *)dm->data;
1932   MPI_Comm     comm;
1933   PetscMPIInt  size, rank;
1934   PetscSection rootSection, leafSection;
1935   IS           rootrank, leafrank;
1936   DM           dmCoord;
1937   DMLabel      lblOverlap;
1938   PetscSF      sfOverlap, sfStratified, sfPoint;
1939 
1940   PetscFunctionBegin;
1941   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1942   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1943   if (sf) PetscAssertPointer(sf, 3);
1944   PetscAssertPointer(dmOverlap, 4);
1945 
1946   if (sf) *sf = NULL;
1947   *dmOverlap = NULL;
1948   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1949   PetscCallMPI(MPI_Comm_size(comm, &size));
1950   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1951   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1952   {
1953     // We need to get options for the _already_ distributed mesh, so it must be done here
1954     PetscInt    overlap;
1955     const char *prefix;
1956     char        oldPrefix[PETSC_MAX_PATH_LEN];
1957 
1958     oldPrefix[0] = '\0';
1959     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1960     PetscCall(PetscStrncpy(oldPrefix, prefix, sizeof(oldPrefix)));
1961     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
1962     PetscCall(DMPlexGetOverlap(dm, &overlap));
1963     PetscObjectOptionsBegin((PetscObject)dm);
1964     PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
1965     PetscOptionsEnd();
1966     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
1967   }
1968   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1969   /* Compute point overlap with neighbouring processes on the distributed DM */
1970   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1971   PetscCall(PetscSectionCreate(comm, &rootSection));
1972   PetscCall(PetscSectionCreate(comm, &leafSection));
1973   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
1974   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1975   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1976   /* Convert overlap label to stratified migration SF */
1977   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
1978   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
1979   PetscCall(PetscSFDestroy(&sfOverlap));
1980   sfOverlap = sfStratified;
1981   PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
1982   PetscCall(PetscSFSetFromOptions(sfOverlap));
1983 
1984   PetscCall(PetscSectionDestroy(&rootSection));
1985   PetscCall(PetscSectionDestroy(&leafSection));
1986   PetscCall(ISDestroy(&rootrank));
1987   PetscCall(ISDestroy(&leafrank));
1988   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1989 
1990   /* Build the overlapping DM */
1991   PetscCall(DMPlexCreate(comm, dmOverlap));
1992   PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
1993   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
1994   /* Store the overlap in the new DM */
1995   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
1996   /* Build the new point SF */
1997   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
1998   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
1999   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
2000   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2001   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
2002   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2003   PetscCall(PetscSFDestroy(&sfPoint));
2004   /* Cleanup overlap partition */
2005   PetscCall(DMLabelDestroy(&lblOverlap));
2006   if (sf) *sf = sfOverlap;
2007   else PetscCall(PetscSFDestroy(&sfOverlap));
2008   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2009   PetscFunctionReturn(PETSC_SUCCESS);
2010 }
2011 
2012 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2013 {
2014   DM_Plex *mesh = (DM_Plex *)dm->data;
2015 
2016   PetscFunctionBegin;
2017   *overlap = mesh->overlap;
2018   PetscFunctionReturn(PETSC_SUCCESS);
2019 }
2020 
2021 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2022 {
2023   DM_Plex *mesh    = NULL;
2024   DM_Plex *meshSrc = NULL;
2025 
2026   PetscFunctionBegin;
2027   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2028   mesh          = (DM_Plex *)dm->data;
2029   mesh->overlap = overlap;
2030   if (dmSrc) {
2031     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2032     meshSrc = (DM_Plex *)dmSrc->data;
2033     mesh->overlap += meshSrc->overlap;
2034   }
2035   PetscFunctionReturn(PETSC_SUCCESS);
2036 }
2037 
2038 /*@
2039   DMPlexGetOverlap - Get the width of the cell overlap
2040 
2041   Not Collective
2042 
2043   Input Parameter:
2044 . dm - The `DM`
2045 
2046   Output Parameter:
2047 . overlap - the width of the cell overlap
2048 
2049   Level: intermediate
2050 
2051 .seealso: `DMPLEX`, `DMPlexSetOverlap()`, `DMPlexDistribute()`
2052 @*/
2053 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2054 {
2055   PetscFunctionBegin;
2056   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2057   PetscAssertPointer(overlap, 2);
2058   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2059   PetscFunctionReturn(PETSC_SUCCESS);
2060 }
2061 
2062 /*@
2063   DMPlexSetOverlap - Set the width of the cell overlap
2064 
2065   Logically Collective
2066 
2067   Input Parameters:
2068 + dm      - The `DM`
2069 . dmSrc   - The `DM` that produced this one, or `NULL`
2070 - overlap - the width of the cell overlap
2071 
2072   Level: intermediate
2073 
2074   Note:
2075   The overlap from `dmSrc` is added to `dm`.
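
  Example usage, a sketch with hypothetical meshes `dmNew` and `dmOld`:
.vb
  PetscCall(DMPlexSetOverlap(dmNew, dmOld, 1)); // overlap of dmNew becomes 1 + overlap of dmOld
.ve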
2076 
2077 .seealso: `DMPLEX`, `DMPlexGetOverlap()`, `DMPlexDistribute()`
2078 @*/
2079 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2080 {
2081   PetscFunctionBegin;
2082   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2083   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2084   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2085   PetscFunctionReturn(PETSC_SUCCESS);
2086 }
2087 
2088 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2089 {
2090   DM_Plex *mesh = (DM_Plex *)dm->data;
2091 
2092   PetscFunctionBegin;
2093   mesh->distDefault = dist;
2094   PetscFunctionReturn(PETSC_SUCCESS);
2095 }
2096 
2097 /*@
2098   DMPlexDistributeSetDefault - Set flag indicating whether the `DM` should be distributed by default
2099 
2100   Logically Collective
2101 
2102   Input Parameters:
2103 + dm   - The `DM`
2104 - dist - Flag for distribution
2105 
2106   Level: intermediate
2107 
2108 .seealso: `DMPLEX`, `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2109 @*/
2110 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2111 {
2112   PetscFunctionBegin;
2113   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2114   PetscValidLogicalCollectiveBool(dm, dist, 2);
2115   PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2116   PetscFunctionReturn(PETSC_SUCCESS);
2117 }
2118 
2119 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2120 {
2121   DM_Plex *mesh = (DM_Plex *)dm->data;
2122 
2123   PetscFunctionBegin;
2124   *dist = mesh->distDefault;
2125   PetscFunctionReturn(PETSC_SUCCESS);
2126 }
2127 
2128 /*@
2129   DMPlexDistributeGetDefault - Get flag indicating whether the `DM` should be distributed by default
2130 
2131   Not Collective
2132 
2133   Input Parameter:
2134 . dm - The `DM`
2135 
2136   Output Parameter:
2137 . dist - Flag for distribution
2138 
2139   Level: intermediate
2140 
2141 .seealso: `DMPLEX`, `DM`, `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2142 @*/
2143 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2144 {
2145   PetscFunctionBegin;
2146   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2147   PetscAssertPointer(dist, 2);
2148   PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2149   PetscFunctionReturn(PETSC_SUCCESS);
2150 }
2151 
2152 /*@C
2153   DMPlexGetGatherDM - Get a copy of the `DMPLEX` that gathers all points on the
2154   root process of the original's communicator.
2155 
2156   Collective
2157 
2158   Input Parameter:
2159 . dm - the original `DMPLEX` object
2160 
2161   Output Parameters:
2162 + sf         - the `PetscSF` used for point distribution (optional)
2163 - gatherMesh - the gathered `DM` object, or `NULL`
2164 
2165   Level: intermediate
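
  Example usage, a minimal sketch (on a single rank `gatherMesh` remains `NULL`):
.vb
  DM gatherMesh = NULL;

  PetscCall(DMPlexGetGatherDM(dm, NULL, &gatherMesh));
  // work with the gathered mesh, whose points all live on rank 0
  PetscCall(DMDestroy(&gatherMesh));
.ve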
2166 
2167 .seealso: `DMPLEX`, `DM`, `PetscSF`, `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2168 @*/
2169 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2170 {
2171   MPI_Comm         comm;
2172   PetscMPIInt      size;
2173   PetscPartitioner oldPart, gatherPart;
2174 
2175   PetscFunctionBegin;
2176   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2177   PetscAssertPointer(gatherMesh, 3);
2178   *gatherMesh = NULL;
2179   if (sf) *sf = NULL;
2180   comm = PetscObjectComm((PetscObject)dm);
2181   PetscCallMPI(MPI_Comm_size(comm, &size));
2182   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2183   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2184   PetscCall(PetscObjectReference((PetscObject)oldPart));
2185   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2186   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2187   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2188   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2189 
2190   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2191   PetscCall(PetscPartitionerDestroy(&gatherPart));
2192   PetscCall(PetscPartitionerDestroy(&oldPart));
2193   PetscFunctionReturn(PETSC_SUCCESS);
2194 }
2195 
2196 /*@C
2197   DMPlexGetRedundantDM - Get a copy of the `DMPLEX` that is completely copied on each process.
2198 
2199   Collective
2200 
2201   Input Parameter:
2202 . dm - the original `DMPLEX` object
2203 
2204   Output Parameters:
2205 + sf            - the `PetscSF` used for point distribution (optional)
2206 - redundantMesh - the redundant `DM` object, or `NULL`
2207 
2208   Level: intermediate
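
  Example usage, a minimal sketch (on a single rank the input mesh itself is returned):
.vb
  DM redundantMesh = NULL;

  PetscCall(DMPlexGetRedundantDM(dm, NULL, &redundantMesh));
  // every rank now holds a complete copy of the mesh
  PetscCall(DMDestroy(&redundantMesh));
.ve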
2209 
2210 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2211 @*/
2212 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
2213 {
2214   MPI_Comm     comm;
2215   PetscMPIInt  size, rank;
2216   PetscInt     pStart, pEnd, p;
2217   PetscInt     numPoints = -1;
2218   PetscSF      migrationSF, sfPoint, gatherSF;
2219   DM           gatherDM, dmCoord;
2220   PetscSFNode *points;
2221 
2222   PetscFunctionBegin;
2223   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2224   PetscAssertPointer(redundantMesh, 3);
2225   *redundantMesh = NULL;
2226   comm           = PetscObjectComm((PetscObject)dm);
2227   PetscCallMPI(MPI_Comm_size(comm, &size));
2228   if (size == 1) {
2229     PetscCall(PetscObjectReference((PetscObject)dm));
2230     *redundantMesh = dm;
2231     if (sf) *sf = NULL;
2232     PetscFunctionReturn(PETSC_SUCCESS);
2233   }
2234   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2235   if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
2236   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2237   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2238   numPoints = pEnd - pStart;
2239   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2240   PetscCall(PetscMalloc1(numPoints, &points));
2241   PetscCall(PetscSFCreate(comm, &migrationSF));
2242   for (p = 0; p < numPoints; p++) {
2243     points[p].index = p;
2244     points[p].rank  = 0;
2245   }
2246   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2247   PetscCall(DMPlexCreate(comm, redundantMesh));
2248   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2249   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2250   /* This is to express that all points are in the overlap */
2251   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
2252   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2253   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2254   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2255   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2256   PetscCall(PetscSFDestroy(&sfPoint));
2257   if (sf) {
2258     PetscSF tsf;
2259 
2260     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2261     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2262     PetscCall(PetscSFDestroy(&tsf));
2263   }
2264   PetscCall(PetscSFDestroy(&migrationSF));
2265   PetscCall(PetscSFDestroy(&gatherSF));
2266   PetscCall(DMDestroy(&gatherDM));
2267   PetscCall(DMCopyDisc(dm, *redundantMesh));
2268   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2269   PetscFunctionReturn(PETSC_SUCCESS);
2270 }
2271 
2272 /*@
2273   DMPlexIsDistributed - Find out whether this `DM` is distributed, i.e. more than one rank owns some points.
2274 
2275   Collective
2276 
2277   Input Parameter:
2278 . dm - The `DM` object
2279 
2280   Output Parameter:
2281 . distributed - Flag whether the `DM` is distributed
2282 
2283   Level: intermediate
2284 
2285   Notes:
2286   This currently finds out whether at least two ranks have any DAG points.
2287   This involves `MPI_Allreduce()` with one integer.
2288   The result is currently not stashed so every call to this routine involves this global communication.
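
  Example usage, a sketch that distributes only when the mesh is not yet distributed:
.vb
  PetscBool distributed;
  DM        dmDist = NULL;

  PetscCall(DMPlexIsDistributed(dm, &distributed));
  if (!distributed) PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
.ve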
2289 
2290 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2291 @*/
2292 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2293 {
2294   PetscInt    pStart, pEnd, count;
2295   MPI_Comm    comm;
2296   PetscMPIInt size;
2297 
2298   PetscFunctionBegin;
2299   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2300   PetscAssertPointer(distributed, 2);
2301   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2302   PetscCallMPI(MPI_Comm_size(comm, &size));
2303   if (size == 1) {
2304     *distributed = PETSC_FALSE;
2305     PetscFunctionReturn(PETSC_SUCCESS);
2306   }
2307   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2308   count = (pEnd - pStart) > 0 ? 1 : 0;
2309   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2310   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2311   PetscFunctionReturn(PETSC_SUCCESS);
2312 }
2313 
2314 /*@C
2315   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2316 
2317   Input Parameters:
2318 + dm   - The `DM`
2319 - name - The name of the specific parallel distribution
2320 
2321   Level: developer
2322 
2323   Note:
2324   If the distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2325   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2326   this name. Conversely, if the distribution name is set when loading, `DMPlexTopologyLoad()`
2327   loads the parallel distribution stored in the file under this name.
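
  Example usage, a sketch in which the name string and `viewer` are placeholders:
.vb
  PetscCall(DMPlexDistributionSetName(dm, "my-distribution"));
  PetscCall(DMPlexTopologyView(dm, viewer));
.ve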
2328 
2329 .seealso: `DMPLEX`, `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2330 @*/
2331 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2332 {
2333   DM_Plex *mesh = (DM_Plex *)dm->data;
2334 
2335   PetscFunctionBegin;
2336   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2337   if (name) PetscAssertPointer(name, 2);
2338   PetscCall(PetscFree(mesh->distributionName));
2339   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2340   PetscFunctionReturn(PETSC_SUCCESS);
2341 }
2342 
2343 /*@C
2344   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2345 
2346   Input Parameter:
2347 . dm - The `DM`
2348 
2349   Output Parameter:
2350 . name - The name of the specific parallel distribution
2351 
2352   Level: developer
2353 
2354   Note:
2355   If the distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2356   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2357   this name. Conversely, if the distribution name is set when loading, `DMPlexTopologyLoad()`
2358   loads the parallel distribution stored in the file under this name.
2359 
2360 .seealso: `DMPLEX`, `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2361 @*/
2362 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2363 {
2364   DM_Plex *mesh = (DM_Plex *)dm->data;
2365 
2366   PetscFunctionBegin;
2367   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2368   PetscAssertPointer(name, 2);
2369   *name = mesh->distributionName;
2370   PetscFunctionReturn(PETSC_SUCCESS);
2371 }
2372