xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx) {
22   DM_Plex *mesh = (DM_Plex *)dm->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
26   mesh->useradjacency    = user;
27   mesh->useradjacencyctx = ctx;
28   PetscFunctionReturn(0);
29 }
30 
31 /*@C
32   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
33 
34   Input Parameter:
35 . dm      - The DM object
36 
37   Output Parameters:
38 + user    - The callback
39 - ctx     - context for callback evaluation
40 
41   Level: advanced
42 
43 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
44 @*/
45 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx) {
46   DM_Plex *mesh = (DM_Plex *)dm->data;
47 
48   PetscFunctionBegin;
49   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
50   if (user) *user = mesh->useradjacency;
51   if (ctx) *ctx = mesh->useradjacencyctx;
52   PetscFunctionReturn(0);
53 }
54 
55 /*@
56   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
57 
58   Input Parameters:
59 + dm      - The DM object
60 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
61 
62   Level: intermediate
63 
64 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
65 @*/
66 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors) {
67   DM_Plex *mesh = (DM_Plex *)dm->data;
68 
69   PetscFunctionBegin;
70   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
71   mesh->useAnchors = useAnchors;
72   PetscFunctionReturn(0);
73 }
74 
75 /*@
76   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
77 
78   Input Parameter:
79 . dm      - The DM object
80 
81   Output Parameter:
82 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
83 
84   Level: intermediate
85 
86 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
87 @*/
88 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors) {
89   DM_Plex *mesh = (DM_Plex *)dm->data;
90 
91   PetscFunctionBegin;
92   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
93   PetscValidBoolPointer(useAnchors, 2);
94   *useAnchors = mesh->useAnchors;
95   PetscFunctionReturn(0);
96 }
97 
98 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) {
99   const PetscInt *cone   = NULL;
100   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
101 
102   PetscFunctionBeginHot;
103   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
104   PetscCall(DMPlexGetCone(dm, p, &cone));
105   for (c = 0; c <= coneSize; ++c) {
106     const PetscInt  point   = !c ? p : cone[c - 1];
107     const PetscInt *support = NULL;
108     PetscInt        supportSize, s, q;
109 
110     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
111     PetscCall(DMPlexGetSupport(dm, point, &support));
112     for (s = 0; s < supportSize; ++s) {
113       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
114         if (support[s] == adj[q]) break;
115       }
116       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
117     }
118   }
119   *adjSize = numAdj;
120   PetscFunctionReturn(0);
121 }
122 
123 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) {
124   const PetscInt *support = NULL;
125   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
126 
127   PetscFunctionBeginHot;
128   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
129   PetscCall(DMPlexGetSupport(dm, p, &support));
130   for (s = 0; s <= supportSize; ++s) {
131     const PetscInt  point = !s ? p : support[s - 1];
132     const PetscInt *cone  = NULL;
133     PetscInt        coneSize, c, q;
134 
135     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
136     PetscCall(DMPlexGetCone(dm, point, &cone));
137     for (c = 0; c < coneSize; ++c) {
138       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
139         if (cone[c] == adj[q]) break;
140       }
141       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
142     }
143   }
144   *adjSize = numAdj;
145   PetscFunctionReturn(0);
146 }
147 
148 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) {
149   PetscInt *star   = NULL;
150   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
151 
152   PetscFunctionBeginHot;
153   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
154   for (s = 0; s < starSize * 2; s += 2) {
155     const PetscInt *closure = NULL;
156     PetscInt        closureSize, c, q;
157 
158     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
159     for (c = 0; c < closureSize * 2; c += 2) {
160       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
161         if (closure[c] == adj[q]) break;
162       }
163       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
164     }
165     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166   }
167   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
168   *adjSize = numAdj;
169   PetscFunctionReturn(0);
170 }
171 
/* Core adjacency driver: dispatches to the user callback or to one of the
   cone/support/transitive strategies, optionally allocating the adjacency array
   (when *adj is NULL) and substituting anchor points for constrained points
   (when useAnchors is set and the DM has anchors). */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[]) {
  /* Cached size of the last allocation, shared process-wide across all DMs.
     NOTE(review): if a caller passes a non-NULL *adj with *adjSize < 0, a stale
     asiz from an earlier call (possibly on a different DM) is used -- confirm
     callers never do this. */
  static PetscInt asiz       = 0;
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    /* Allocate a worst-case buffer: geometric-series bounds on star/closure sizes,
       scaled by the maximum anchor count, capped at the total number of chart points. */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
    asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
    asiz *= maxAnchors;
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: a user-installed callback overrides every built-in strategy */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points
       (deduplicated), compacting the array in place. */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i]; /* NOTE(review): shadows the function parameter p */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) { PetscCall(PetscSectionGetDof(aSec, p, &aDof)); }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left; do not advance i,
           since orig[i] now holds the next unprocessed entry */
        for (j = i + 1; j < numAdj; j++) { orig[j - 1] = orig[j]; }
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        for (s = 0; s < aDof; ++s) {
          /* Dedup scan with append-on-miss (the comma expression appends when q reaches numAdj) */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(0);
}
251 
252 /*@
253   DMPlexGetAdjacency - Return all points adjacent to the given point
254 
255   Input Parameters:
256 + dm - The DM object
257 - p  - The point
258 
259   Input/Output Parameters:
260 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
261             on output the number of adjacent points
262 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
263         on output contains the adjacent points
264 
265   Level: advanced
266 
267   Notes:
268     The user must PetscFree the adj array if it was not passed in.
269 
270 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
271 @*/
272 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) {
273   PetscBool useCone, useClosure, useAnchors;
274 
275   PetscFunctionBeginHot;
276   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
277   PetscValidIntPointer(adjSize, 3);
278   PetscValidPointer(adj, 4);
279   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
280   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
281   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
287 
288   Collective on dm
289 
290   Input Parameters:
291 + dm      - The DM
292 . sfPoint - The PetscSF which encodes point connectivity
293 . rootRankSection -
294 . rootRanks -
295 . leftRankSection -
296 - leafRanks -
297 
298   Output Parameters:
299 + processRanks - A list of process neighbors, or NULL
300 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
301 
302   Level: developer
303 
304 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
305 @*/
306 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess) {
307   const PetscSFNode *remotePoints;
308   PetscInt          *localPointsNew;
309   PetscSFNode       *remotePointsNew;
310   const PetscInt    *nranks;
311   PetscInt          *ranksNew;
312   PetscBT            neighbors;
313   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
314   PetscMPIInt        size, proc, rank;
315 
316   PetscFunctionBegin;
317   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
318   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
319   if (processRanks) { PetscValidPointer(processRanks, 7); }
320   if (sfProcess) { PetscValidPointer(sfProcess, 8); }
321   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
322   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
323   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
324   PetscCall(PetscBTCreate(size, &neighbors));
325   PetscCall(PetscBTMemzero(size, neighbors));
326   /* Compute root-to-leaf process connectivity */
327   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
328   PetscCall(ISGetIndices(rootRanks, &nranks));
329   for (p = pStart; p < pEnd; ++p) {
330     PetscInt ndof, noff, n;
331 
332     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
333     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
334     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
335   }
336   PetscCall(ISRestoreIndices(rootRanks, &nranks));
337   /* Compute leaf-to-neighbor process connectivity */
338   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
339   PetscCall(ISGetIndices(leafRanks, &nranks));
340   for (p = pStart; p < pEnd; ++p) {
341     PetscInt ndof, noff, n;
342 
343     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
344     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
345     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
346   }
347   PetscCall(ISRestoreIndices(leafRanks, &nranks));
348   /* Compute leaf-to-root process connectivity */
349   for (l = 0; l < numLeaves; ++l) { PetscBTSet(neighbors, remotePoints[l].rank); }
350   /* Calculate edges */
351   PetscBTClear(neighbors, rank);
352   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
353     if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
354   }
355   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
356   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
357   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
358   for (proc = 0, n = 0; proc < size; ++proc) {
359     if (PetscBTLookup(neighbors, proc)) {
360       ranksNew[n]              = proc;
361       localPointsNew[n]        = proc;
362       remotePointsNew[n].index = rank;
363       remotePointsNew[n].rank  = proc;
364       ++n;
365     }
366   }
367   PetscCall(PetscBTDestroy(&neighbors));
368   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
369   else PetscCall(PetscFree(ranksNew));
370   if (sfProcess) {
371     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
372     PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
373     PetscCall(PetscSFSetFromOptions(*sfProcess));
374     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
375   }
376   PetscFunctionReturn(0);
377 }
378 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank) {
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  /* Begin/End must bracket before rootdegree is read below */
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root: every local point advertises this process rank,
     and the gather concatenates the leaf ranks into each root's edge list */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* IS takes ownership of remoterank (PETSC_OWN_POINTER) */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(0);
}
431 
#if 0
/* NOTE(review): dead code, compiled out via '#if 0'.
   Appears intended to copy the overlap-label configuration (ovLabels/ovValues
   and the exclusion label/value) from dm to a redistributed ndm by looking up
   labels by name on the new DM. Kept for reference; verify before re-enabling. */
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(0);
}
#endif
456 
457 /*@C
458   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
459 
460   Collective on dm
461 
462   Input Parameters:
463 + dm          - The DM
464 . levels      - Number of overlap levels
465 . rootSection - The number of leaves for a given root point
466 . rootrank    - The rank of each edge into the root point
467 . leafSection - The number of processes sharing a given leaf point
468 - leafrank    - The rank of each process sharing a leaf point
469 
470   Output Parameter:
471 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
472 
473   Level: developer
474 
475 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
476 @*/
477 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel) {
478   MPI_Comm           comm;
479   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
480   PetscSF            sfPoint;
481   const PetscSFNode *remote;
482   const PetscInt    *local;
483   const PetscInt    *nrank, *rrank;
484   PetscInt          *adj = NULL;
485   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
486   PetscMPIInt        rank, size;
487   PetscBool          flg;
488 
489   PetscFunctionBegin;
490   *ovLabel = NULL;
491   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
492   PetscCallMPI(MPI_Comm_size(comm, &size));
493   PetscCallMPI(MPI_Comm_rank(comm, &rank));
494   if (size == 1) PetscFunctionReturn(0);
495   PetscCall(DMGetPointSF(dm, &sfPoint));
496   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
497   if (!levels) {
498     /* Add owned points */
499     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
500     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
501     PetscFunctionReturn(0);
502   }
503   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
504   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
505   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
506   /* Handle leaves: shared with the root point */
507   for (l = 0; l < nleaves; ++l) {
508     PetscInt adjSize = PETSC_DETERMINE, a;
509 
510     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
511     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
512   }
513   PetscCall(ISGetIndices(rootrank, &rrank));
514   PetscCall(ISGetIndices(leafrank, &nrank));
515   /* Handle roots */
516   for (p = pStart; p < pEnd; ++p) {
517     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
518 
519     if ((p >= sStart) && (p < sEnd)) {
520       /* Some leaves share a root with other leaves on different processes */
521       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
522       if (neighbors) {
523         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
524         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
525         for (n = 0; n < neighbors; ++n) {
526           const PetscInt remoteRank = nrank[noff + n];
527 
528           if (remoteRank == rank) continue;
529           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
530         }
531       }
532     }
533     /* Roots are shared with leaves */
534     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
535     if (!neighbors) continue;
536     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
537     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
538     for (n = 0; n < neighbors; ++n) {
539       const PetscInt remoteRank = rrank[noff + n];
540 
541       if (remoteRank == rank) continue;
542       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
543     }
544   }
545   PetscCall(PetscFree(adj));
546   PetscCall(ISRestoreIndices(rootrank, &rrank));
547   PetscCall(ISRestoreIndices(leafrank, &nrank));
548   /* Add additional overlap levels */
549   for (l = 1; l < levels; l++) {
550     /* Propagate point donations over SF to capture remote connections */
551     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
552     /* Add next level of point donations to the label */
553     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
554   }
555   /* We require the closure in the overlap */
556   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
557   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
558   if (flg) {
559     PetscViewer viewer;
560     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
561     PetscCall(DMLabelView(ovAdjByRank, viewer));
562   }
563   /* Invert sender to receiver label */
564   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
565   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
566   /* Add owned points, except for shared local points */
567   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
568   for (l = 0; l < nleaves; ++l) {
569     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
570     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
571   }
572   /* Clean up */
573   PetscCall(DMLabelDestroy(&ovAdjByRank));
574   PetscFunctionReturn(0);
575 }
576 
577 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank) {
578   PetscInt neighbors, el;
579 
580   PetscFunctionBegin;
581   PetscCall(PetscSectionGetDof(section, p, &neighbors));
582   if (neighbors) {
583     PetscInt   *adj     = NULL;
584     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
585     PetscMPIInt rank;
586 
587     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
588     PetscCall(PetscSectionGetOffset(section, p, &noff));
589     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
590     for (n = 0; n < neighbors; ++n) {
591       const PetscInt remoteRank = ranks[noff + n];
592 
593       if (remoteRank == rank) continue;
594       for (a = 0; a < adjSize; ++a) {
595         PetscBool insert = PETSC_TRUE;
596 
597         for (el = 0; el < numExLabels; ++el) {
598           PetscInt exVal;
599           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
600           if (exVal == exValue[el]) {
601             insert = PETSC_FALSE;
602             break;
603           }
604         }
605         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
606       }
607     }
608   }
609   PetscFunctionReturn(0);
610 }
611 
612 /*@C
613   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
614 
615   Collective on dm
616 
617   Input Parameters:
618 + dm          - The DM
619 . numLabels   - The number of labels to draw candidate points from
620 . label       - An array of labels containing candidate points
621 . value       - An array of label values marking the candidate points
622 . numExLabels - The number of labels to use for exclusion
623 . exLabel     - An array of labels indicating points to be excluded, or NULL
624 . exValue     - An array of label values to be excluded, or NULL
625 . rootSection - The number of leaves for a given root point
626 . rootrank    - The rank of each edge into the root point
627 . leafSection - The number of processes sharing a given leaf point
628 - leafrank    - The rank of each process sharing a leaf point
629 
630   Output Parameter:
631 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
632 
633   Note:
634   The candidate points are only added to the overlap if they are adjacent to a shared point
635 
636   Level: developer
637 
638 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
639 @*/
640 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel) {
641   MPI_Comm           comm;
642   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
643   PetscSF            sfPoint;
644   const PetscSFNode *remote;
645   const PetscInt    *local;
646   const PetscInt    *nrank, *rrank;
647   PetscInt          *adj = NULL;
648   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
649   PetscMPIInt        rank, size;
650   PetscBool          flg;
651 
652   PetscFunctionBegin;
653   *ovLabel = NULL;
654   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
655   PetscCallMPI(MPI_Comm_size(comm, &size));
656   PetscCallMPI(MPI_Comm_rank(comm, &rank));
657   if (size == 1) PetscFunctionReturn(0);
658   PetscCall(DMGetPointSF(dm, &sfPoint));
659   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
660   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
661   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
662   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
663   PetscCall(ISGetIndices(rootrank, &rrank));
664   PetscCall(ISGetIndices(leafrank, &nrank));
665   for (l = 0; l < numLabels; ++l) {
666     IS              valIS;
667     const PetscInt *points;
668     PetscInt        n;
669 
670     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
671     if (!valIS) continue;
672     PetscCall(ISGetIndices(valIS, &points));
673     PetscCall(ISGetLocalSize(valIS, &n));
674     for (PetscInt i = 0; i < n; ++i) {
675       const PetscInt p = points[i];
676 
677       if ((p >= sStart) && (p < sEnd)) {
678         PetscInt loc, adjSize = PETSC_DETERMINE;
679 
680         /* Handle leaves: shared with the root point */
681         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
682         else loc = (p >= 0 && p < nleaves) ? p : -1;
683         if (loc >= 0) {
684           const PetscInt remoteRank = remote[loc].rank;
685 
686           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
687           for (PetscInt a = 0; a < adjSize; ++a) {
688             PetscBool insert = PETSC_TRUE;
689 
690             for (el = 0; el < numExLabels; ++el) {
691               PetscInt exVal;
692               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
693               if (exVal == exValue[el]) {
694                 insert = PETSC_FALSE;
695                 break;
696               }
697             }
698             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
699           }
700         }
701         /* Some leaves share a root with other leaves on different processes */
702         HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank);
703       }
704       /* Roots are shared with leaves */
705       HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank);
706     }
707     PetscCall(ISRestoreIndices(valIS, &points));
708     PetscCall(ISDestroy(&valIS));
709   }
710   PetscCall(PetscFree(adj));
711   PetscCall(ISRestoreIndices(rootrank, &rrank));
712   PetscCall(ISRestoreIndices(leafrank, &nrank));
713   /* We require the closure in the overlap */
714   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
715   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
716   if (flg) {
717     PetscViewer viewer;
718     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
719     PetscCall(DMLabelView(ovAdjByRank, viewer));
720   }
721   /* Invert sender to receiver label */
722   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
723   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
724   /* Add owned points, except for shared local points */
725   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
726   for (l = 0; l < nleaves; ++l) {
727     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
728     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
729   }
730   /* Clean up */
731   PetscCall(DMLabelDestroy(&ovAdjByRank));
732   PetscFunctionReturn(0);
733 }
734 
735 /*@C
736   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
737 
738   Collective on dm
739 
740   Input Parameters:
741 + dm          - The DM
742 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
743 
744   Output Parameter:
745 . migrationSF - An SF that maps original points in old locations to points in new locations
746 
747   Level: developer
748 
749 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
750 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF) {
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets:
     tag each local point with its depth and ship the tags over the overlap SF
     so each rank learns the depth of every point it will receive */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  /* -1 marks leaves whose depth has not been received (sanity value only) */
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift.
     The shifts append received points in the standard DMPlex interleaved order:
     cells (depth dim) first, then vertices (0), then faces (dim-1) down to edges (1) */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    /* depthIdx[d] is the first new-numbering slot available for depth-d points */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF:
     leaves = all pre-existing local points plus all incoming overlap points */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves, shifted by the incoming strata sizes;
     the root is provisionally this rank (overwritten below for shared points) */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points: points already shared
     through the DM point SF keep their existing remote owner as root */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended after the shifted local
     stratum (depthIdx was advanced past the local points above) */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* The SF takes ownership of ilocal and iremote */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(0);
}
843 
844 /*@
845   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
846 
847   Input Parameters:
848 + dm          - The DM
849 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
850 
851   Output Parameter:
852 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
853 
854   Note:
855   This lexicographically sorts by (depth, cellType)
856 
857   Level: developer
858 
859 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
860 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF) {
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  /* All participating ranks must agree on the depth (empty ranks have ldepth < 0) */
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets:
     tag each root with (depth, cellType) packed in a PetscSFNode and broadcast */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      /* index carries the depth, rank carries the cell type */
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  /* (-1, -1) marks leaves with no received tag: such points are bucketed by depth */
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    /* Lay out each stratum: first the cell-type buckets of that dimension,
       then the depth-only bucket; `shift` accumulates the running offset */
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i];

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Remaining cell types (dimension outside [0, dim], excluding ghosts already
       placed with the cells) go at the end */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;

    if ((PetscInt)ct < 0) {
      /* No cell type received: place in the depth-only bucket */
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  /* ilocal ownership passes to the SF; iremote is copied since it belongs to `sf` */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
967 
968 /*@
969   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
970 
971   Collective on dm
972 
973   Input Parameters:
974 + dm - The DMPlex object
975 . pointSF - The PetscSF describing the communication pattern
976 . originalSection - The PetscSection for existing data layout
977 - originalVec - The existing data in a local vector
978 
979   Output Parameters:
+ newSection - The PetscSection describing the new data layout
981 - newVec - The new data in a local vector
982 
983   Level: developer
984 
985 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
986 @*/
987 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec) {
988   PetscSF      fieldSF;
989   PetscInt    *remoteOffsets, fieldSize;
990   PetscScalar *originalValues, *newValues;
991 
992   PetscFunctionBegin;
993   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
994   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
995 
996   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
997   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
998   PetscCall(VecSetType(newVec, dm->vectype));
999 
1000   PetscCall(VecGetArray(originalVec, &originalValues));
1001   PetscCall(VecGetArray(newVec, &newValues));
1002   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1003   PetscCall(PetscFree(remoteOffsets));
1004   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1005   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1006   PetscCall(PetscSFDestroy(&fieldSF));
1007   PetscCall(VecRestoreArray(newVec, &newValues));
1008   PetscCall(VecRestoreArray(originalVec, &originalValues));
1009   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 /*@
1014   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1015 
1016   Collective on dm
1017 
1018   Input Parameters:
1019 + dm - The DMPlex object
1020 . pointSF - The PetscSF describing the communication pattern
1021 . originalSection - The PetscSection for existing data layout
1022 - originalIS - The existing data
1023 
1024   Output Parameters:
+ newSection - The PetscSection describing the new data layout
1026 - newIS - The new data
1027 
1028   Level: developer
1029 
1030 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1031 @*/
1032 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS) {
1033   PetscSF         fieldSF;
1034   PetscInt       *newValues, *remoteOffsets, fieldSize;
1035   const PetscInt *originalValues;
1036 
1037   PetscFunctionBegin;
1038   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1039   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1040 
1041   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1042   PetscCall(PetscMalloc1(fieldSize, &newValues));
1043 
1044   PetscCall(ISGetIndices(originalIS, &originalValues));
1045   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1046   PetscCall(PetscFree(remoteOffsets));
1047   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1048   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1049   PetscCall(PetscSFDestroy(&fieldSF));
1050   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1051   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1052   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 /*@
1057   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1058 
1059   Collective on dm
1060 
1061   Input Parameters:
1062 + dm - The DMPlex object
1063 . pointSF - The PetscSF describing the communication pattern
1064 . originalSection - The PetscSection for existing data layout
1065 . datatype - The type of data
1066 - originalData - The existing data
1067 
1068   Output Parameters:
1069 + newSection - The PetscSection describing the new data layout
1070 - newData - The new data
1071 
1072   Level: developer
1073 
1074 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1075 @*/
1076 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) {
1077   PetscSF     fieldSF;
1078   PetscInt   *remoteOffsets, fieldSize;
1079   PetscMPIInt dataSize;
1080 
1081   PetscFunctionBegin;
1082   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1083   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1084 
1085   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1086   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1087   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1088 
1089   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1090   PetscCall(PetscFree(remoteOffsets));
1091   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1092   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1093   PetscCall(PetscSFDestroy(&fieldSF));
1094   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1095   PetscFunctionReturn(0);
1096 }
1097 
/* Migrate cone sizes, cone points, and cone orientations from dm to dmParallel,
   translating cone point numbers through the global numbering (`original`, may be
   NULL if cones are already global) and back to the new local numbering
   (`renumbering`), then build supports and strata on the parallel mesh. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) {
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  /* Allocate cone storage on the parallel mesh now that cone sizes are known */
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    PetscInt numCones;

    /* Translate local cone points to global numbers before shipping them */
    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* Cones are already in global numbering */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points back to the new local numbering;
     IS_GTOLM_MASK leaves unmapped points as negative values, caught below */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations need no renumbering; reuse the same section SF */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Carry over the adjacency configuration to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(0);
}
1179 
1180 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) {
1181   MPI_Comm         comm;
1182   DM               cdm, cdmParallel;
1183   PetscSection     originalCoordSection, newCoordSection;
1184   Vec              originalCoordinates, newCoordinates;
1185   PetscInt         bs;
1186   const char      *name;
1187   const PetscReal *maxCell, *Lstart, *L;
1188 
1189   PetscFunctionBegin;
1190   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1191   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1192 
1193   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1194   PetscCall(DMGetCoordinateDM(dm, &cdm));
1195   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1196   PetscCall(DMCopyDisc(cdm, cdmParallel));
1197   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1198   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1199   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1200   if (originalCoordinates) {
1201     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1202     PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1203     PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1204 
1205     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1206     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1207     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1208     PetscCall(VecSetBlockSize(newCoordinates, bs));
1209     PetscCall(VecDestroy(&newCoordinates));
1210   }
1211 
1212   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1213   PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1214   PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1215   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1216   if (cdm) {
1217     PetscCall(DMClone(dmParallel, &cdmParallel));
1218     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1219     PetscCall(DMCopyDisc(cdm, cdmParallel));
1220     PetscCall(DMDestroy(&cdmParallel));
1221     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1222     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1223     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1224     if (originalCoordinates) {
1225       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1226       PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1227       PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1228 
1229       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1230       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1231       PetscCall(VecSetBlockSize(newCoordinates, bs));
1232       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1233       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1234       PetscCall(VecDestroy(&newCoordinates));
1235     }
1236     PetscCall(PetscSectionDestroy(&newCoordSection));
1237   }
1238   PetscFunctionReturn(0);
1239 }
1240 
/* Migrate all DMLabels from dm to dmParallel over the migration SF. The
   "depth" label is normally rebuilt by stratification and skipped, unless the
   user has modified it (detected via its object state), in which case it is
   communicated like any other label. All MPI calls here are collective, so
   every rank must execute the same sequence. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) {
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  /* If any rank saw a modified depth label, all ranks must send it */
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Remove the auto-generated depth label so the communicated one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Ranks without labels learn isDepth from rank 0 */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1318 
/* Migrate the reference tree and the parent/childID relation (used for
   non-conforming meshes) from dm to dmParallel. Parent point numbers are
   translated to global numbering (`original`, may be NULL) before
   communication and back to the new local numbering (`renumbering`) after. */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel) {
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    /* Migrate the parent section layout, then build a dof-granular SF for the data */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      PetscInt numParents;

      /* Translate parent points to global numbers before shipping them */
      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      /* Parents are already in global numbering */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    /* Child IDs need no renumbering; ship them as-is */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received global parents back to new local numbers;
       IS_GTOLM_MASK leaves unmapped points negative, caught below */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  /* Preserve the anchor-usage flag on the parallel mesh */
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1393 
/* Build the point SF (ghost-point ownership) of dmParallel from a migration SF and copy the
   adjacency configuration of dm over to dmParallel.

   Ownership is decided by a MAXLOC reduction over (rank, index) pairs: unclaimed points receive
   bids from all leaves holding them and the highest rank wins. Sentinels: -1 = unclaimed root,
   -2 = leaf that must not bid (point already claimed), -3 = root state excluded from the reduction.

   NOTE(review): marked PETSC_UNUSED; DMPlexDistribute() uses DMPlexCreatePointSF() instead. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel) {
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots, &rowners, numLeaves, &lowners));
    /* Mark every root "unclaimed" and push that state down to the leaves */
    for (p = 0; p < numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p = 0; p < numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Reduce bids to the roots (highest rank wins), then broadcast the winners back */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points owned by another rank become the leaves of the point SF */
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners, lowners));
    /* ghostPoints/remotePoints ownership transfers to the SF (PETSC_OWN_POINTER) */
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  {
    PetscBool useCone, useClosure, useAnchors;

    /* Propagate adjacency settings from the serial to the parallel DM */
    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1468 
1469 /*@
1470   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1471 
1472   Input Parameters:
1473 + dm - The DMPlex object
1474 - flg - Balance the partition?
1475 
1476   Level: intermediate
1477 
1478 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1479 @*/
1480 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg) {
1481   DM_Plex *mesh = (DM_Plex *)dm->data;
1482 
1483   PetscFunctionBegin;
1484   mesh->partitionBalance = flg;
1485   PetscFunctionReturn(0);
1486 }
1487 
1488 /*@
1489   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1490 
1491   Input Parameter:
1492 . dm - The DMPlex object
1493 
1494   Output Parameter:
1495 . flg - Balance the partition?
1496 
1497   Level: intermediate
1498 
1499 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1500 @*/
1501 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg) {
1502   DM_Plex *mesh = (DM_Plex *)dm->data;
1503 
1504   PetscFunctionBegin;
1505   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1506   PetscValidBoolPointer(flg, 2);
1507   *flg = mesh->partitionBalance;
1508   PetscFunctionReturn(0);
1509 }
1510 
/* Payload for the custom MPI reduction used by DMPlexCreatePointSF(): a vote value plus the
   (rank, index) pair that cast it, so MaxLocCarry() can carry the location along with the max. */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1514 
1515 /* MaxLoc, but carry a third piece of information around */
1516 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype) {
1517   Petsc3Int *a = (Petsc3Int *)inout_;
1518   Petsc3Int *b = (Petsc3Int *)in_;
1519   PetscInt   i, len = *len_;
1520   for (i = 0; i < len; i++) {
1521     if (a[i].vote < b[i].vote) {
1522       a[i].vote  = b[i].vote;
1523       a[i].rank  = b[i].rank;
1524       a[i].index = b[i].index;
1525     } else if (a[i].vote <= b[i].vote) {
1526       if (a[i].rank >= b[i].rank) {
1527         a[i].rank  = b[i].rank;
1528         a[i].index = b[i].index;
1529       }
1530     }
1531   }
1532 }
1533 
1534 /*@C
1535   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1536 
1537   Input Parameters:
1538 + dm          - The source DMPlex object
1539 . migrationSF - The star forest that describes the parallel point remapping
1540 - ownership   - Flag causing a vote to determine point ownership
1541 
1542   Output Parameter:
1543 . pointSF     - The star forest describing the point overlap in the remapped DM
1544 
1545   Notes:
1546   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1547 
1548   Level: developer
1549 
1550 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1551 @*/
1552 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF) {
1553   PetscMPIInt        rank, size;
1554   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1555   PetscInt          *pointLocal;
1556   const PetscInt    *leaves;
1557   const PetscSFNode *roots;
1558   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1559   Vec                shifts;
1560   const PetscInt     numShifts  = 13759;
1561   const PetscScalar *shift      = NULL;
1562   const PetscBool    shiftDebug = PETSC_FALSE;
1563   PetscBool          balance;
1564 
1565   PetscFunctionBegin;
1566   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1567   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1568   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1569   PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1570 
1571   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1572   PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1573   PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1574   if (ownership) {
1575     MPI_Op       op;
1576     MPI_Datatype datatype;
1577     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1578     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1579     if (balance) {
1580       PetscRandom r;
1581 
1582       PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1583       PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1584       PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1585       PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1586       PetscCall(VecSetType(shifts, VECSTANDARD));
1587       PetscCall(VecSetRandom(shifts, r));
1588       PetscCall(PetscRandomDestroy(&r));
1589       PetscCall(VecGetArrayRead(shifts, &shift));
1590     }
1591 
1592     PetscCall(PetscMalloc1(nroots, &rootVote));
1593     PetscCall(PetscMalloc1(nleaves, &leafVote));
1594     /* Point ownership vote: Process with highest rank owns shared points */
1595     for (p = 0; p < nleaves; ++p) {
1596       if (shiftDebug) {
1597         PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1598                                           (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1599       }
1600       /* Either put in a bid or we know we own it */
1601       leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1602       leafVote[p].rank  = rank;
1603       leafVote[p].index = p;
1604     }
1605     for (p = 0; p < nroots; p++) {
1606       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1607       rootVote[p].vote  = -3;
1608       rootVote[p].rank  = -3;
1609       rootVote[p].index = -3;
1610     }
1611     PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1612     PetscCallMPI(MPI_Type_commit(&datatype));
1613     PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1614     PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1615     PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1616     PetscCallMPI(MPI_Op_free(&op));
1617     PetscCallMPI(MPI_Type_free(&datatype));
1618     for (p = 0; p < nroots; p++) {
1619       rootNodes[p].rank  = rootVote[p].rank;
1620       rootNodes[p].index = rootVote[p].index;
1621     }
1622     PetscCall(PetscFree(leafVote));
1623     PetscCall(PetscFree(rootVote));
1624   } else {
1625     for (p = 0; p < nroots; p++) {
1626       rootNodes[p].index = -1;
1627       rootNodes[p].rank  = rank;
1628     }
1629     for (p = 0; p < nleaves; p++) {
1630       /* Write new local id into old location */
1631       if (roots[p].rank == rank) { rootNodes[roots[p].index].index = leaves ? leaves[p] : p; }
1632     }
1633   }
1634   PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1635   PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1636 
1637   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1638     if (leafNodes[p].rank != rank) npointLeaves++;
1639   }
1640   PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1641   PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1642   for (idx = 0, p = 0; p < nleaves; p++) {
1643     if (leafNodes[p].rank != rank) {
1644       /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1645       pointLocal[idx]  = p;
1646       pointRemote[idx] = leafNodes[p];
1647       idx++;
1648     }
1649   }
1650   if (shift) {
1651     PetscCall(VecRestoreArrayRead(shifts, &shift));
1652     PetscCall(VecDestroy(&shifts));
1653   }
1654   if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1655   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1656   PetscCall(PetscSFSetFromOptions(*pointSF));
1657   PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1658   PetscCall(PetscFree2(rootNodes, leafNodes));
1659   PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1660   if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF));
1661   PetscFunctionReturn(0);
1662 }
1663 
1664 /*@C
1665   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1666 
1667   Collective on dm
1668 
1669   Input Parameters:
1670 + dm       - The source DMPlex object
1671 - sf       - The star forest communication context describing the migration pattern
1672 
1673   Output Parameter:
1674 . targetDM - The target DMPlex object
1675 
1676   Level: intermediate
1677 
1678 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1679 @*/
1680 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM) {
1681   MPI_Comm               comm;
1682   PetscInt               dim, cdim, nroots;
1683   PetscSF                sfPoint;
1684   ISLocalToGlobalMapping ltogMigration;
1685   ISLocalToGlobalMapping ltogOriginal = NULL;
1686 
1687   PetscFunctionBegin;
1688   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1689   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1690   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1691   PetscCall(DMGetDimension(dm, &dim));
1692   PetscCall(DMSetDimension(targetDM, dim));
1693   PetscCall(DMGetCoordinateDim(dm, &cdim));
1694   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1695 
1696   /* Check for a one-to-all distribution pattern */
1697   PetscCall(DMGetPointSF(dm, &sfPoint));
1698   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1699   if (nroots >= 0) {
1700     IS        isOriginal;
1701     PetscInt  n, size, nleaves;
1702     PetscInt *numbering_orig, *numbering_new;
1703 
1704     /* Get the original point numbering */
1705     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1706     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1707     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1708     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1709     /* Convert to positive global numbers */
1710     for (n = 0; n < size; n++) {
1711       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1712     }
1713     /* Derive the new local-to-global mapping from the old one */
1714     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1715     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1716     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1717     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1718     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1719     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1720     PetscCall(ISDestroy(&isOriginal));
1721   } else {
1722     /* One-to-all distribution pattern: We can derive LToG from SF */
1723     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1724   }
1725   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1726   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1727   /* Migrate DM data to target DM */
1728   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1729   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1730   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1731   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1732   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1733   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1734   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1735   PetscFunctionReturn(0);
1736 }
1737 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) {
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfMigration, sfStratified, sfPoint;
  PetscBool        flg, balance;
  PetscMPIInt      rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmParallel, 4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to distribute on a single rank: outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
  {
    /* Convert partition to DMLabel: stratum = target rank, points = closure of its cells */
    IS              is;
    PetscHSetI      ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure() */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));

  /* Invert the partition label into a migration label and derive a stratified migration SF */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    /* oldRemote aliases permRemote here, so this frees the permuted copy only */
    if (oldLeaves) PetscCall(PetscFree(oldRemote));
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    /* First DM with useNatural = PETSC_TRUE is considered natural */
    /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
    /* Compose with a previous sfMigration if present */
    if (dm->sfMigration) {
      PetscSF naturalPointSF;

      PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
      PetscCall(PetscSFDestroy(&sfMigration));
      sfMigration = naturalPointSF;
    }
    PetscCall(DMGetLocalSection(dm, &section));
    PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: hand the migration SF to the caller if requested, otherwise release it */
  if (sf) {
    *sf = sfMigration;
  } else PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1928 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions (the same on all ranks)

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Notes:
  If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Options Database Keys:
+ -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
. -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
. -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
- -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap

  Level: advanced

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap) {
  DM_Plex     *mesh = (DM_Plex *)dm->data;
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DM           dmCoord;
  DMLabel      lblOverlap;
  PetscSF      sfOverlap, sfStratified, sfPoint;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Nothing to overlap on a single rank: outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(0);
  {
    // We need to get options for the _already_distributed mesh, so it must be done here
    /* NOTE(review): this inner 'overlap' shadows the function parameter and its updated value
       is discarded when the scope ends; presumably the options call records what it needs on
       dm itself (e.g. mesh->ovLabels) — confirm before relying on the option value */
    PetscInt    overlap;
    const char *prefix;
    char        oldPrefix[PETSC_MAX_PATH_LEN];

    oldPrefix[0] = '\0';
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    PetscCall(PetscStrcpy(oldPrefix, prefix));
    /* Temporarily extend the prefix with "dist_" so overlap options target the distributed mesh */
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
    PetscCall(DMPlexGetOverlap(dm, &overlap));
    PetscObjectOptionsBegin((PetscObject)dm);
    PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
    PetscOptionsEnd();
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
  }
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  /* Compute point overlap with neighbouring processes on the distributed DM */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  /* Label-driven overlap takes precedence over the width-based default */
  if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  /* Convert overlap label to stratified migration SF */
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
  PetscCall(PetscSFDestroy(&sfOverlap));
  sfOverlap = sfStratified;
  PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
  PetscCall(PetscSFSetFromOptions(sfOverlap));

  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));

  /* Build the overlapping DM */
  PetscCall(DMPlexCreate(comm, dmOverlap));
  PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
  /* Store the overlap in the new DM */
  PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
  /* Build the new point SF */
  PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Cleanup overlap partition */
  PetscCall(DMLabelDestroy(&lblOverlap));
  /* Hand the overlap SF to the caller if requested, otherwise release it */
  if (sf) *sf = sfOverlap;
  else PetscCall(PetscSFDestroy(&sfOverlap));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
2039 
2040 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap) {
2041   DM_Plex *mesh = (DM_Plex *)dm->data;
2042 
2043   PetscFunctionBegin;
2044   *overlap = mesh->overlap;
2045   PetscFunctionReturn(0);
2046 }
2047 
2048 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap) {
2049   DM_Plex *mesh    = NULL;
2050   DM_Plex *meshSrc = NULL;
2051 
2052   PetscFunctionBegin;
2053   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2054   mesh          = (DM_Plex *)dm->data;
2055   mesh->overlap = overlap;
2056   if (dmSrc) {
2057     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2058     meshSrc = (DM_Plex *)dmSrc->data;
2059     mesh->overlap += meshSrc->overlap;
2060   }
2061   PetscFunctionReturn(0);
2062 }
2063 
2064 /*@
2065   DMPlexGetOverlap - Get the width of the cell overlap
2066 
2067   Not collective
2068 
2069   Input Parameter:
2070 . dm   - The DM
2071 
2072   Output Parameter:
2073 . overlap - the width of the cell overlap
2074 
2075   Level: intermediate
2076 
2077 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2078 @*/
2079 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap) {
2080   PetscFunctionBegin;
2081   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2082   PetscValidIntPointer(overlap, 2);
2083   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2084   PetscFunctionReturn(0);
2085 }
2086 
2087 /*@
2088   DMPlexSetOverlap - Set the width of the cell overlap
2089 
2090   Logically collective
2091 
2092   Input Parameters:
2093 + dm      - The DM
2094 . dmSrc   - The DM that produced this one, or NULL
2095 - overlap - the width of the cell overlap
2096 
2097   Note:
2098   The overlap from dmSrc is added to dm
2099 
2100   Level: intermediate
2101 
2102 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2103 @*/
2104 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap) {
2105   PetscFunctionBegin;
2106   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2107   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2108   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2109   PetscFunctionReturn(0);
2110 }
2111 
2112 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist) {
2113   DM_Plex *mesh = (DM_Plex *)dm->data;
2114 
2115   PetscFunctionBegin;
2116   mesh->distDefault = dist;
2117   PetscFunctionReturn(0);
2118 }
2119 
2120 /*@
2121   DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default
2122 
2123   Logically collective
2124 
2125   Input Parameters:
2126 + dm   - The DM
2127 - dist - Flag for distribution
2128 
2129   Level: intermediate
2130 
2131 .seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2132 @*/
2133 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist) {
2134   PetscFunctionBegin;
2135   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2136   PetscValidLogicalCollectiveBool(dm, dist, 2);
2137   PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2138   PetscFunctionReturn(0);
2139 }
2140 
2141 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist) {
2142   DM_Plex *mesh = (DM_Plex *)dm->data;
2143 
2144   PetscFunctionBegin;
2145   *dist = mesh->distDefault;
2146   PetscFunctionReturn(0);
2147 }
2148 
2149 /*@
2150   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2151 
2152   Not collective
2153 
2154   Input Parameter:
2155 . dm   - The DM
2156 
2157   Output Parameter:
2158 . dist - Flag for distribution
2159 
2160   Level: intermediate
2161 
2162 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2163 @*/
2164 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist) {
2165   PetscFunctionBegin;
2166   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2167   PetscValidBoolPointer(dist, 2);
2168   PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2169   PetscFunctionReturn(0);
2170 }
2171 
2172 /*@C
2173   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2174   root process of the original's communicator.
2175 
2176   Collective on dm
2177 
2178   Input Parameter:
2179 . dm - the original DMPlex object
2180 
2181   Output Parameters:
2182 + sf - the PetscSF used for point distribution (optional)
2183 - gatherMesh - the gathered DM object, or NULL
2184 
2185   Level: intermediate
2186 
2187 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2188 @*/
2189 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh) {
2190   MPI_Comm         comm;
2191   PetscMPIInt      size;
2192   PetscPartitioner oldPart, gatherPart;
2193 
2194   PetscFunctionBegin;
2195   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2196   PetscValidPointer(gatherMesh, 3);
2197   *gatherMesh = NULL;
2198   if (sf) *sf = NULL;
2199   comm = PetscObjectComm((PetscObject)dm);
2200   PetscCallMPI(MPI_Comm_size(comm, &size));
2201   if (size == 1) PetscFunctionReturn(0);
2202   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2203   PetscCall(PetscObjectReference((PetscObject)oldPart));
2204   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2205   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2206   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2207   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2208 
2209   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2210   PetscCall(PetscPartitionerDestroy(&gatherPart));
2211   PetscCall(PetscPartitionerDestroy(&oldPart));
2212   PetscFunctionReturn(0);
2213 }
2214 
2215 /*@C
2216   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
2217 
2218   Collective on dm
2219 
2220   Input Parameter:
2221 . dm - the original DMPlex object
2222 
2223   Output Parameters:
2224 + sf - the PetscSF used for point distribution (optional)
2225 - redundantMesh - the redundant DM object, or NULL
2226 
2227   Level: intermediate
2228 
2229 .seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2230 @*/
2231 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh) {
2232   MPI_Comm     comm;
2233   PetscMPIInt  size, rank;
2234   PetscInt     pStart, pEnd, p;
2235   PetscInt     numPoints = -1;
2236   PetscSF      migrationSF, sfPoint, gatherSF;
2237   DM           gatherDM, dmCoord;
2238   PetscSFNode *points;
2239 
2240   PetscFunctionBegin;
2241   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2242   PetscValidPointer(redundantMesh, 3);
2243   *redundantMesh = NULL;
2244   comm           = PetscObjectComm((PetscObject)dm);
2245   PetscCallMPI(MPI_Comm_size(comm, &size));
2246   if (size == 1) {
2247     PetscCall(PetscObjectReference((PetscObject)dm));
2248     *redundantMesh = dm;
2249     if (sf) *sf = NULL;
2250     PetscFunctionReturn(0);
2251   }
2252   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2253   if (!gatherDM) PetscFunctionReturn(0);
2254   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2255   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2256   numPoints = pEnd - pStart;
2257   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2258   PetscCall(PetscMalloc1(numPoints, &points));
2259   PetscCall(PetscSFCreate(comm, &migrationSF));
2260   for (p = 0; p < numPoints; p++) {
2261     points[p].index = p;
2262     points[p].rank  = 0;
2263   }
2264   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2265   PetscCall(DMPlexCreate(comm, redundantMesh));
2266   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2267   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2268   /* This is to express that all point are in overlap */
2269   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
2270   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2271   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2272   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2273   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2274   PetscCall(PetscSFDestroy(&sfPoint));
2275   if (sf) {
2276     PetscSF tsf;
2277 
2278     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2279     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2280     PetscCall(PetscSFDestroy(&tsf));
2281   }
2282   PetscCall(PetscSFDestroy(&migrationSF));
2283   PetscCall(PetscSFDestroy(&gatherSF));
2284   PetscCall(DMDestroy(&gatherDM));
2285   PetscCall(DMCopyDisc(dm, *redundantMesh));
2286   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2287   PetscFunctionReturn(0);
2288 }
2289 
2290 /*@
2291   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2292 
2293   Collective
2294 
2295   Input Parameter:
2296 . dm      - The DM object
2297 
2298   Output Parameter:
2299 . distributed - Flag whether the DM is distributed
2300 
2301   Level: intermediate
2302 
2303   Notes:
2304   This currently finds out whether at least two ranks have any DAG points.
2305   This involves MPI_Allreduce() with one integer.
2306   The result is currently not stashed so every call to this routine involves this global communication.
2307 
2308 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2309 @*/
2310 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed) {
2311   PetscInt    pStart, pEnd, count;
2312   MPI_Comm    comm;
2313   PetscMPIInt size;
2314 
2315   PetscFunctionBegin;
2316   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2317   PetscValidBoolPointer(distributed, 2);
2318   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2319   PetscCallMPI(MPI_Comm_size(comm, &size));
2320   if (size == 1) {
2321     *distributed = PETSC_FALSE;
2322     PetscFunctionReturn(0);
2323   }
2324   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2325   count = (pEnd - pStart) > 0 ? 1 : 0;
2326   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2327   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2328   PetscFunctionReturn(0);
2329 }
2330