xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision c9786dd0daa9d442b0e4c3582dc6253c23e913a2)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm   - The DM object
9 . user - The user callback, may be `NULL` (to clear the callback)
10 - ctx  - context for callback evaluation, may be `NULL`
11 
12   Level: advanced
13 
14   Notes:
15   The caller of `DMPlexGetAdjacency()` may need to arrange that a large enough array is available for the adjacency.
16 
17   Any setting here overrides other configuration of `DMPLEX` adjacency determination.
18 
19 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm - The `DM` object
37 
38   Output Parameters:
39 + user - The callback
40 - ctx  - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(PETSC_SUCCESS);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm         - The `DM` object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMPLEX`, `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm - The `DM` object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPLEX`, `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscAssertPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(PETSC_SUCCESS);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(PETSC_SUCCESS);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(PETSC_SUCCESS);
177 }
178 
179 // Returns the maximum number of adjancent points in the DMPlex
180 PetscErrorCode DMPlexGetMaxAdjacencySize_Internal(DM dm, PetscBool useAnchors, PetscInt *max_adjacency_size)
181 {
182   PetscInt depth, maxC, maxS, maxP, pStart, pEnd, asiz, maxAnchors = 1;
183 
184   PetscFunctionBeginUser;
185   if (useAnchors) {
186     PetscSection aSec = NULL;
187     IS           aIS  = NULL;
188     PetscInt     aStart, aEnd;
189     PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
190     if (aSec) {
191       PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
192       maxAnchors = PetscMax(1, maxAnchors);
193       PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
194     }
195   }
196 
197   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
198   PetscCall(DMPlexGetDepth(dm, &depth));
199   depth = PetscMax(depth, -depth);
200   PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
201   maxP = maxS * maxC;
202   /* Adjacency can be as large as supp(cl(cell)) or cl(supp(vertex)),
203           supp(cell) + supp(maxC faces) + supp(maxC^2 edges) + supp(maxC^3 vertices)
204         = 0 + maxS*maxC + maxS^2*maxC^2 + maxS^3*maxC^3
205         = \sum^d_{i=0} (maxS*maxC)^i - 1
206         = (maxS*maxC)^{d+1} - 1 / (maxS*maxC - 1) - 1
207     We could improve this by getting the max by strata:
208           supp[d](cell) + supp[d-1](maxC[d] faces) + supp[1](maxC[d]*maxC[d-1] edges) + supp[0](maxC[d]*maxC[d-1]*maxC[d-2] vertices)
209         = 0 + maxS[d-1]*maxC[d] + maxS[1]*maxC[d]*maxC[d-1] + maxS[0]*maxC[d]*maxC[d-1]*maxC[d-2]
210     and the same with S and C reversed
211   */
212   if ((depth == 3 && maxP > 200) || (depth == 2 && maxP > 580)) asiz = pEnd - pStart;
213   else asiz = (maxP > 1) ? ((PetscPowInt(maxP, depth + 1) - 1) / (maxP - 1)) : depth + 1;
214   asiz *= maxAnchors;
215   *max_adjacency_size = PetscMin(asiz, pEnd - pStart);
216   PetscFunctionReturn(PETSC_SUCCESS);
217 }
218 
// Returns adjacent mesh points to the selected point given specific criteria
//
// Input/Output Parameters:
// + adjSize - On input, the capacity of *adj (or a negative value to use the cached maximum);
//             on output, the number of adjacent points
// - adj     - Either NULL, in which case a worst-case-sized array is allocated here
//             (the caller must PetscFree() it), or an existing array of size *adjSize;
//             on output, contains the adjacent points
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static cache of the most recently computed maximum adjacency size.
     It is only refreshed when *adj is NULL, so a caller passing its own array with
     *adjSize < 0 reuses a value from a previous call (possibly for a different DM),
     and the cache is not thread-safe — confirm this matches intended usage. */
  static PetscInt asiz   = 0;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* Anchor information: constrained points are replaced by their anchors below */
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    /* Allocate a worst-case-sized array for the caller */
    PetscCall(DMPlexGetMaxAdjacencySize_Internal(dm, useAnchors, &asiz));
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency definition; a user callback overrides everything else */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points, deduplicated */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left; do not advance i,
           since orig[i] now holds the next unexamined point */
        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        /* Append each anchor not already present; the comma expression in the loop
           condition performs the append exactly when the scan reaches numAdj */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
290 
291 /*@
292   DMPlexGetAdjacency - Return all points adjacent to the given point
293 
294   Input Parameters:
295 + dm - The `DM` object
296 - p  - The point
297 
298   Input/Output Parameters:
299 + adjSize - The maximum size of `adj` if it is non-`NULL`, or `PETSC_DETERMINE`;
300             on output the number of adjacent points
301 - adj     - Either `NULL` so that the array is allocated, or an existing array with size `adjSize`;
302             on output contains the adjacent points
303 
304   Level: advanced
305 
306   Notes:
307   The user must `PetscFree()` the `adj` array if it was not passed in.
308 
309 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
310 @*/
311 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
312 {
313   PetscBool useCone, useClosure, useAnchors;
314 
315   PetscFunctionBeginHot;
316   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
317   PetscAssertPointer(adjSize, 3);
318   PetscAssertPointer(adj, 4);
319   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
320   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
321   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
322   PetscFunctionReturn(PETSC_SUCCESS);
323 }
324 
/*@
  DMPlexCreateTwoSidedProcessSF - Create a `PetscSF` which just has process connectivity

  Collective

  Input Parameters:
+ dm              - The `DM`
. sfPoint         - The `PetscSF` which encodes point connectivity
. rootRankSection - to be documented
. rootRanks       - to be documented
. leafRankSection - to be documented
- leafRanks       - to be documented

  Output Parameters:
+ processRanks - A list of process neighbors, or `NULL`
- sfProcess    - A `PetscSF` encoding the two-sided process connectivity, or `NULL`

  Level: developer

.seealso: `DMPLEX`, `PetscSFCreate()`, `DMPlexCreateProcessSF()`
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, PeOp IS *processRanks, PeOp PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors; /* bit mask over communicator ranks: is rank r a neighbor of this process? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) PetscAssertPointer(processRanks, 7);
  if (sfProcess) PetscAssertPointer(sfProcess, 8);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
  PetscCall(PetscBTCreate(size, &neighbors));
  PetscCall(PetscBTMemzero(size, neighbors));
  /* Compute root-to-leaf process connectivity */
  PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(rootRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(rootRanks, &nranks));
  /* Compute leaf-to-neighbor process connectivity */
  PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(leafRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
  }
  PetscCall(ISRestoreIndices(leafRanks, &nranks));
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
  /* Calculate edges: a process is never its own neighbor */
  PetscCall(PetscBTClear(neighbors, rank));
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
  }
  PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
  PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
  PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
  /* One SF edge per neighbor: leaf n on this rank points at (rank, proc) on the neighbor */
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  PetscCall(PetscBTDestroy(&neighbors));
  /* ranksNew is either handed to the IS (PETSC_OWN_POINTER) or freed here */
  if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
  else PetscCall(PetscFree(ranksNew));
  if (sfProcess) {
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
    PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
    PetscCall(PetscSFSetFromOptions(*sfProcess));
    /* The SF takes ownership of localPointsNew and remotePointsNew */
    PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
419 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective

  Input Parameter:
. dm - The `DM`

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root: every leaf sends its own rank, each root
     receives one entry per incoming SF edge (nedges total) */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* rootrank takes ownership of remoterank */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
473 
474 /*@
475   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
476 
477   Collective
478 
479   Input Parameters:
480 + dm          - The `DM`
481 . levels      - Number of overlap levels
482 . rootSection - The number of leaves for a given root point
483 . rootrank    - The rank of each edge into the root point
484 . leafSection - The number of processes sharing a given leaf point
485 - leafrank    - The rank of each process sharing a leaf point
486 
487   Output Parameter:
488 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
489 
490   Level: developer
491 
492 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
493 @*/
494 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
495 {
496   MPI_Comm           comm;
497   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
498   PetscSF            sfPoint;
499   const PetscSFNode *remote;
500   const PetscInt    *local;
501   const PetscInt    *nrank, *rrank;
502   PetscInt          *adj = NULL;
503   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
504   PetscMPIInt        rank, size;
505   PetscBool          flg;
506 
507   PetscFunctionBegin;
508   *ovLabel = NULL;
509   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
510   PetscCallMPI(MPI_Comm_size(comm, &size));
511   PetscCallMPI(MPI_Comm_rank(comm, &rank));
512   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
513   PetscCall(DMGetPointSF(dm, &sfPoint));
514   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
515   if (!levels) {
516     /* Add owned points */
517     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
518     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
519     PetscFunctionReturn(PETSC_SUCCESS);
520   }
521   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
522   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
523   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
524   /* Handle leaves: shared with the root point */
525   for (l = 0; l < nleaves; ++l) {
526     PetscInt adjSize = PETSC_DETERMINE, a;
527 
528     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
529     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
530   }
531   PetscCall(ISGetIndices(rootrank, &rrank));
532   PetscCall(ISGetIndices(leafrank, &nrank));
533   /* Handle roots */
534   for (p = pStart; p < pEnd; ++p) {
535     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
536 
537     if ((p >= sStart) && (p < sEnd)) {
538       /* Some leaves share a root with other leaves on different processes */
539       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
540       if (neighbors) {
541         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
542         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
543         for (n = 0; n < neighbors; ++n) {
544           const PetscInt remoteRank = nrank[noff + n];
545 
546           if (remoteRank == rank) continue;
547           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
548         }
549       }
550     }
551     /* Roots are shared with leaves */
552     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
553     if (!neighbors) continue;
554     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
555     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
556     for (n = 0; n < neighbors; ++n) {
557       const PetscInt remoteRank = rrank[noff + n];
558 
559       if (remoteRank == rank) continue;
560       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
561     }
562   }
563   PetscCall(PetscFree(adj));
564   PetscCall(ISRestoreIndices(rootrank, &rrank));
565   PetscCall(ISRestoreIndices(leafrank, &nrank));
566   /* Add additional overlap levels */
567   for (l = 1; l < levels; l++) {
568     /* Propagate point donations over SF to capture remote connections */
569     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
570     /* Add next level of point donations to the label */
571     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
572   }
573   /* We require the closure in the overlap */
574   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
575   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
576   if (flg) {
577     PetscViewer viewer;
578     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
579     PetscCall(DMLabelView(ovAdjByRank, viewer));
580   }
581   /* Invert sender to receiver label */
582   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
583   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
584   /* Add owned points, except for shared local points */
585   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
586   for (l = 0; l < nleaves; ++l) {
587     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
588     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
589   }
590   /* Clean up */
591   PetscCall(DMLabelDestroy(&ovAdjByRank));
592   PetscFunctionReturn(PETSC_SUCCESS);
593 }
594 
595 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
596 {
597   PetscInt neighbors, el;
598 
599   PetscFunctionBegin;
600   PetscCall(PetscSectionGetDof(section, p, &neighbors));
601   if (neighbors) {
602     PetscInt   *adj     = NULL;
603     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
604     PetscMPIInt rank;
605 
606     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
607     PetscCall(PetscSectionGetOffset(section, p, &noff));
608     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
609     for (n = 0; n < neighbors; ++n) {
610       const PetscInt remoteRank = ranks[noff + n];
611 
612       if (remoteRank == rank) continue;
613       for (a = 0; a < adjSize; ++a) {
614         PetscBool insert = PETSC_TRUE;
615 
616         for (el = 0; el < numExLabels; ++el) {
617           PetscInt exVal;
618           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
619           if (exVal == exValue[el]) {
620             insert = PETSC_FALSE;
621             break;
622           }
623         }
624         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
625       }
626     }
627     PetscCall(PetscFree(adj));
628   }
629   PetscFunctionReturn(PETSC_SUCCESS);
630 }
631 
632 /*@C
633   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
634 
635   Collective
636 
637   Input Parameters:
638 + dm          - The `DM`
639 . numLabels   - The number of labels to draw candidate points from
640 . label       - An array of labels containing candidate points
641 . value       - An array of label values marking the candidate points
642 . numExLabels - The number of labels to use for exclusion
643 . exLabel     - An array of labels indicating points to be excluded, or `NULL`
644 . exValue     - An array of label values to be excluded, or `NULL`
645 . rootSection - The number of leaves for a given root point
646 . rootrank    - The rank of each edge into the root point
647 . leafSection - The number of processes sharing a given leaf point
648 - leafrank    - The rank of each process sharing a leaf point
649 
650   Output Parameter:
651 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
652 
653   Level: developer
654 
655   Note:
656   The candidate points are only added to the overlap if they are adjacent to a shared point
657 
658 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
659 @*/
660 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
661 {
662   MPI_Comm           comm;
663   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
664   PetscSF            sfPoint;
665   const PetscSFNode *remote;
666   const PetscInt    *local;
667   const PetscInt    *nrank, *rrank;
668   PetscInt          *adj = NULL;
669   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
670   PetscMPIInt        rank, size;
671   PetscBool          flg;
672 
673   PetscFunctionBegin;
674   *ovLabel = NULL;
675   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
676   PetscCallMPI(MPI_Comm_size(comm, &size));
677   PetscCallMPI(MPI_Comm_rank(comm, &rank));
678   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
679   PetscCall(DMGetPointSF(dm, &sfPoint));
680   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
681   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
682   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
683   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
684   PetscCall(ISGetIndices(rootrank, &rrank));
685   PetscCall(ISGetIndices(leafrank, &nrank));
686   for (l = 0; l < numLabels; ++l) {
687     IS              valIS;
688     const PetscInt *points;
689     PetscInt        n;
690 
691     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
692     if (!valIS) continue;
693     PetscCall(ISGetIndices(valIS, &points));
694     PetscCall(ISGetLocalSize(valIS, &n));
695     for (PetscInt i = 0; i < n; ++i) {
696       const PetscInt p = points[i];
697 
698       if ((p >= sStart) && (p < sEnd)) {
699         PetscInt loc, adjSize = PETSC_DETERMINE;
700 
701         /* Handle leaves: shared with the root point */
702         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
703         else loc = (p >= 0 && p < nleaves) ? p : -1;
704         if (loc >= 0) {
705           const PetscInt remoteRank = remote[loc].rank;
706 
707           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
708           for (PetscInt a = 0; a < adjSize; ++a) {
709             PetscBool insert = PETSC_TRUE;
710 
711             for (el = 0; el < numExLabels; ++el) {
712               PetscInt exVal;
713               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
714               if (exVal == exValue[el]) {
715                 insert = PETSC_FALSE;
716                 break;
717               }
718             }
719             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
720           }
721         }
722         /* Some leaves share a root with other leaves on different processes */
723         PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
724       }
725       /* Roots are shared with leaves */
726       PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
727     }
728     PetscCall(ISRestoreIndices(valIS, &points));
729     PetscCall(ISDestroy(&valIS));
730   }
731   PetscCall(PetscFree(adj));
732   PetscCall(ISRestoreIndices(rootrank, &rrank));
733   PetscCall(ISRestoreIndices(leafrank, &nrank));
734   /* We require the closure in the overlap */
735   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
736   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
737   if (flg) {
738     PetscViewer viewer;
739     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
740     PetscCall(DMLabelView(ovAdjByRank, viewer));
741   }
742   /* Invert sender to receiver label */
743   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
744   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
745   /* Add owned points, except for shared local points */
746   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
747   for (l = 0; l < nleaves; ++l) {
748     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
749     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
750   }
751   /* Clean up */
752   PetscCall(DMLabelDestroy(&ovAdjByRank));
753   PetscFunctionReturn(PETSC_SUCCESS);
754 }
755 
756 /*@
757   DMPlexCreateOverlapMigrationSF - Create a `PetscSF` describing the new mesh distribution to make the overlap described by the input `PetscSF`
758 
759   Collective
760 
761   Input Parameters:
762 + dm        - The `DM`
763 - overlapSF - The `PetscSF` mapping ghost points in overlap to owner points on other processes
764 
765   Output Parameter:
766 . migrationSF - A `PetscSF` that maps original points in old locations to points in new locations
767 
768   Level: developer
769 
770 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
771 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  /* Tag each local point with its depth, then pull the depth of every incoming overlap point */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift */
  /* NOTE(review): the shifts below appear to order received strata as cells (depth dim),
     vertices (0), then the intermediate depths from high to low, matching the usual Plex
     interleaved numbering -- confirm against DMPlexStratify() conventions */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  /* depthIdx[d] is the next free slot (in the new numbering) for a received point of depth d */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local points
     plus the incoming overlap leaves */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Shared points keep pointing at their true owner rather than at themselves */
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended after the local points of the same depth
     (depthIdx was advanced past the locals in the loop above) */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* The SF takes ownership of both ilocal and iremote */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
865 
866 /*@
867   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
868 
869   Input Parameters:
+ dm - The `DM`
- sf - A star forest with non-ordered leaves, usually defining a `DM` point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting `DM` is stratified
875 
876   Level: developer
877 
878   Note:
879   This lexicographically sorts by (depth, cellType)
880 
881 .seealso: `DMPLEX`, `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
882 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCallMPI(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* A rank with ldepth < 0 (empty local mesh) is allowed to differ from the global depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  /* Encode the sort key for each root point in a PetscSFNode: index = depth, rank = cell type */
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  /* Pull the (depth, cell type) key of every migrated point onto its receiver */
  PetscCall(PetscSFBcastBegin(sf, MPIU_SF_NODE, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_SF_NODE, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    /* A negative cell type means the sender recorded none; bucket by depth instead */
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST, DM_POLYTOPE_INTERIOR_GHOST and DM_POLYTOPE_UNKNOWN_CELL as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    /* Lay out strata in the order above; within each stratum, group points by cell type */
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i];

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST || c == DM_POLYTOPE_UNKNOWN_CELL))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Cell types whose dimension falls outside [0, dim] (and are not ghost/unknown cells)
       are placed at the very end */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST && c != DM_POLYTOPE_UNKNOWN_CELL) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const DMPolytopeType ct = (DMPolytopeType)remoteDepths[p].rank;

    /* ctIdx[ct] counts leaves of this cell type placed so far */
    ilocal[p] = ctShift[ct] + ctIdx[ct];
    ++ctIdx[ct];
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
984 
985 /*@
986   DMPlexDistributeField - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
987 
988   Collective
989 
990   Input Parameters:
991 + dm              - The `DMPLEX` object
992 . pointSF         - The `PetscSF` describing the communication pattern
993 . originalSection - The `PetscSection` for existing data layout
994 - originalVec     - The existing data in a local vector
995 
996   Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
998 - newVec     - The new data in a local vector
999 
1000   Level: developer
1001 
1002 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
1003 @*/
1004 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
1005 {
1006   PetscSF      fieldSF;
1007   PetscInt    *remoteOffsets, fieldSize;
1008   PetscScalar *originalValues, *newValues;
1009 
1010   PetscFunctionBegin;
1011   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1012   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1013 
1014   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1015   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1016   PetscCall(VecSetType(newVec, dm->vectype));
1017 
1018   PetscCall(VecGetArray(originalVec, &originalValues));
1019   PetscCall(VecGetArray(newVec, &newValues));
1020   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1021   PetscCall(PetscFree(remoteOffsets));
1022   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1023   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1024   PetscCall(PetscSFDestroy(&fieldSF));
1025   PetscCall(VecRestoreArray(newVec, &newValues));
1026   PetscCall(VecRestoreArray(originalVec, &originalValues));
1027   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1028   PetscFunctionReturn(PETSC_SUCCESS);
1029 }
1030 
1031 /*@
1032   DMPlexDistributeFieldIS - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1033 
1034   Collective
1035 
1036   Input Parameters:
1037 + dm              - The `DMPLEX` object
1038 . pointSF         - The `PetscSF` describing the communication pattern
1039 . originalSection - The `PetscSection` for existing data layout
1040 - originalIS      - The existing data
1041 
1042   Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
1044 - newIS      - The new data
1045 
1046   Level: developer
1047 
1048 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1049 @*/
1050 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1051 {
1052   PetscSF         fieldSF;
1053   PetscInt       *newValues, *remoteOffsets, fieldSize;
1054   const PetscInt *originalValues;
1055 
1056   PetscFunctionBegin;
1057   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1058   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1059 
1060   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1061   PetscCall(PetscMalloc1(fieldSize, &newValues));
1062 
1063   PetscCall(ISGetIndices(originalIS, &originalValues));
1064   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1065   PetscCall(PetscFree(remoteOffsets));
1066   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1067   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1068   PetscCall(PetscSFDestroy(&fieldSF));
1069   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1070   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1071   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1072   PetscFunctionReturn(PETSC_SUCCESS);
1073 }
1074 
1075 /*@
1076   DMPlexDistributeData - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1077 
1078   Collective
1079 
1080   Input Parameters:
1081 + dm              - The `DMPLEX` object
1082 . pointSF         - The `PetscSF` describing the communication pattern
1083 . originalSection - The `PetscSection` for existing data layout
1084 . datatype        - The type of data
1085 - originalData    - The existing data
1086 
1087   Output Parameters:
1088 + newSection - The `PetscSection` describing the new data layout
1089 - newData    - The new data
1090 
1091   Level: developer
1092 
1093 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`
1094 @*/
1095 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1096 {
1097   PetscSF     fieldSF;
1098   PetscInt   *remoteOffsets, fieldSize;
1099   PetscMPIInt dataSize;
1100 
1101   PetscFunctionBegin;
1102   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1103   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1104 
1105   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1106   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1107   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1108 
1109   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1110   PetscCall(PetscFree(remoteOffsets));
1111   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1112   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1113   PetscCall(PetscSFDestroy(&fieldSF));
1114   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1115   PetscFunctionReturn(PETSC_SUCCESS);
1116 }
1117 
/* Distribute the cone section, cones, and cone orientations of dm to dmParallel along
   migrationSF. Cone entries are mapped to global numbering via 'original' (if given)
   and back to the new local numbering via 'renumbering'; then supports and strata are
   rebuilt on dmParallel and basic adjacency settings are copied over. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)dmParallel->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate cone points into the global numbering before sending */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points into the new local numbering; unmapped points become -1 */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* In debug builds, verify that every cone point was found in the new local numbering */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations need no renumbering, so send them directly */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  {
    /* Carry the adjacency configuration over to the distributed mesh */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1200 
1201 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1202 {
1203   MPI_Comm         comm;
1204   DM               cdm, cdmParallel;
1205   PetscSection     originalCoordSection, newCoordSection;
1206   Vec              originalCoordinates, newCoordinates;
1207   PetscInt         bs;
1208   const char      *name;
1209   const PetscReal *maxCell, *Lstart, *L;
1210 
1211   PetscFunctionBegin;
1212   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1213   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1214 
1215   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1216   PetscCall(DMGetCoordinateDM(dm, &cdm));
1217   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1218   PetscCall(DMCopyDisc(cdm, cdmParallel));
1219   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1220   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1221   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1222   if (originalCoordinates) {
1223     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1224     PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1225     PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1226 
1227     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1228     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1229     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1230     PetscCall(VecSetBlockSize(newCoordinates, bs));
1231     PetscCall(VecDestroy(&newCoordinates));
1232   }
1233 
1234   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1235   PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1236   PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1237   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1238   if (cdm) {
1239     PetscCall(DMClone(dmParallel, &cdmParallel));
1240     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1241     PetscCall(DMCopyDisc(cdm, cdmParallel));
1242     PetscCall(DMDestroy(&cdmParallel));
1243     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1244     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1245     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1246     if (originalCoordinates) {
1247       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1248       PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1249       PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1250 
1251       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1252       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1253       PetscCall(VecSetBlockSize(newCoordinates, bs));
1254       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1255       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1256       PetscCall(VecDestroy(&newCoordinates));
1257     }
1258     PetscCall(PetscSectionDestroy(&newCoordSection));
1259   }
1260   PetscFunctionReturn(PETSC_SUCCESS);
1261 }
1262 
/* Distribute every label of dm to dmParallel along migrationSF. The "depth" label is
   normally recreated by stratification and skipped, unless the user modified it
   (detected through its object state), in which case it is communicated too. All
   Bcast/Allreduce calls here are collective: every rank must take the same path. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCallMPI(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPI_C_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Drop the automatically created depth label so the communicated one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    } else {
      isDepth = PETSC_FALSE;
    }
    /* Rank 0's view of "is this the depth label" decides for everyone */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPI_C_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCallMPI(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCallMPI(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPI_C_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1343 
/* Distribute the reference tree and parent/childID data of dm to dmParallel along
   migrationSF, renumbering parent points from the original global numbering (if
   given) into the new local numbering defined by renumbering. */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)dmParallel->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      /* Translate parent points into the global numbering before sending */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    /* Child IDs are plain identifiers and need no renumbering */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received global parent points into the new local numbering; unmapped points become -1 */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* In debug builds, verify that every parent was found in the new local numbering */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1419 
1420 /*@
1421   DMPlexSetPartitionBalance - Should distribution of the `DM` attempt to balance the shared point partition?
1422 
1423   Input Parameters:
1424 + dm  - The `DMPLEX` object
1425 - flg - Balance the partition?
1426 
1427   Level: intermediate
1428 
1429 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1430 @*/
1431 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1432 {
1433   DM_Plex *mesh = (DM_Plex *)dm->data;
1434 
1435   PetscFunctionBegin;
1436   mesh->partitionBalance = flg;
1437   PetscFunctionReturn(PETSC_SUCCESS);
1438 }
1439 
1440 /*@
1441   DMPlexGetPartitionBalance - Does distribution of the `DM` attempt to balance the shared point partition?
1442 
1443   Input Parameter:
1444 . dm - The `DMPLEX` object
1445 
1446   Output Parameter:
1447 . flg - Balance the partition?
1448 
1449   Level: intermediate
1450 
1451 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1452 @*/
1453 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1454 {
1455   DM_Plex *mesh = (DM_Plex *)dm->data;
1456 
1457   PetscFunctionBegin;
1458   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1459   PetscAssertPointer(flg, 2);
1460   *flg = mesh->partitionBalance;
1461   PetscFunctionReturn(PETSC_SUCCESS);
1462 }
1463 
/* Triple carried through the MaxLoc-style ownership reduction below:
     vote  - the (possibly shifted) rank bidding for ownership of a point
     rank  - the actual rank that placed the bid
     index - the local leaf index on the bidding rank */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1467 
1468 /* MaxLoc, but carry a third piece of information around */
1469 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1470 {
1471   Petsc3Int *a = (Petsc3Int *)inout_;
1472   Petsc3Int *b = (Petsc3Int *)in_;
1473   PetscInt   i, len = *len_;
1474   for (i = 0; i < len; i++) {
1475     if (a[i].vote < b[i].vote) {
1476       a[i].vote  = b[i].vote;
1477       a[i].rank  = b[i].rank;
1478       a[i].index = b[i].index;
1479     } else if (a[i].vote <= b[i].vote) {
1480       if (a[i].rank >= b[i].rank) {
1481         a[i].rank  = b[i].rank;
1482         a[i].index = b[i].index;
1483       }
1484     }
1485   }
1486 }
1487 
1488 /*@
1489   DMPlexCreatePointSF - Build a point `PetscSF` from an `PetscSF` describing a point migration
1490 
1491   Input Parameters:
1492 + dm          - The source `DMPLEX` object
1493 . migrationSF - The star forest that describes the parallel point remapping
1494 - ownership   - Flag causing a vote to determine point ownership
1495 
1496   Output Parameter:
1497 . pointSF - The star forest describing the point overlap in the remapped `DM`
1498 
1499   Level: developer
1500 
1501   Note:
1502   Output `pointSF` is guaranteed to have the array of local indices (leaves) sorted.
1503 
1504 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1505 @*/
1506 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1507 {
1508   PetscMPIInt        rank, size;
1509   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1510   PetscInt          *pointLocal;
1511   const PetscInt    *leaves;
1512   const PetscSFNode *roots;
1513   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1514   Vec                shifts;
1515   const PetscInt     numShifts  = 13759;
1516   const PetscScalar *shift      = NULL;
1517   const PetscBool    shiftDebug = PETSC_FALSE;
1518   PetscBool          balance;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1522   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1523   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1524   PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1525   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1526   PetscCall(PetscSFSetFromOptions(*pointSF));
1527   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1528 
1529   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1530   PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1531   PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1532   if (ownership) {
1533     MPI_Op       op;
1534     MPI_Datatype datatype;
1535     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1536 
1537     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1538     if (balance) {
1539       PetscRandom r;
1540 
1541       PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1542       PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1543       PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1544       PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1545       PetscCall(VecSetType(shifts, VECSTANDARD));
1546       PetscCall(VecSetRandom(shifts, r));
1547       PetscCall(PetscRandomDestroy(&r));
1548       PetscCall(VecGetArrayRead(shifts, &shift));
1549     }
1550 
1551     PetscCall(PetscMalloc1(nroots, &rootVote));
1552     PetscCall(PetscMalloc1(nleaves, &leafVote));
1553     /* Point ownership vote: Process with highest rank owns shared points */
1554     for (p = 0; p < nleaves; ++p) {
1555       if (shiftDebug) {
1556         PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1557                                           (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1558       }
1559       /* Either put in a bid or we know we own it */
1560       leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1561       leafVote[p].rank  = rank;
1562       leafVote[p].index = p;
1563     }
1564     for (p = 0; p < nroots; p++) {
1565       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1566       rootVote[p].vote  = -3;
1567       rootVote[p].rank  = -3;
1568       rootVote[p].index = -3;
1569     }
1570     PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1571     PetscCallMPI(MPI_Type_commit(&datatype));
1572     PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1573     PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1574     PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1575     PetscCallMPI(MPI_Op_free(&op));
1576     PetscCallMPI(MPI_Type_free(&datatype));
1577     for (p = 0; p < nroots; p++) {
1578       rootNodes[p].rank  = rootVote[p].rank;
1579       rootNodes[p].index = rootVote[p].index;
1580     }
1581     PetscCall(PetscFree(leafVote));
1582     PetscCall(PetscFree(rootVote));
1583   } else {
1584     for (p = 0; p < nroots; p++) {
1585       rootNodes[p].index = -1;
1586       rootNodes[p].rank  = rank;
1587     }
1588     for (p = 0; p < nleaves; p++) {
1589       /* Write new local id into old location */
1590       if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1591     }
1592   }
1593   PetscCall(PetscSFBcastBegin(migrationSF, MPIU_SF_NODE, rootNodes, leafNodes, MPI_REPLACE));
1594   PetscCall(PetscSFBcastEnd(migrationSF, MPIU_SF_NODE, rootNodes, leafNodes, MPI_REPLACE));
1595 
1596   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1597     if (leafNodes[p].rank != rank) npointLeaves++;
1598   }
1599   PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1600   PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1601   for (idx = 0, p = 0; p < nleaves; p++) {
1602     if (leafNodes[p].rank != rank) {
1603       /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1604       pointLocal[idx]  = p;
1605       pointRemote[idx] = leafNodes[p];
1606       idx++;
1607     }
1608   }
1609   if (shift) {
1610     PetscCall(VecRestoreArrayRead(shifts, &shift));
1611     PetscCall(VecDestroy(&shifts));
1612   }
1613   if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1614   PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1615   PetscCall(PetscFree2(rootNodes, leafNodes));
1616   PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1617   if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
1618   PetscFunctionReturn(PETSC_SUCCESS);
1619 }
1620 
1621 /*@
1622   DMPlexMigrate  - Migrates internal `DM` data over the supplied star forest
1623 
1624   Collective
1625 
1626   Input Parameters:
1627 + dm - The source `DMPLEX` object
1628 - sf - The star forest communication context describing the migration pattern
1629 
1630   Output Parameter:
1631 . targetDM - The target `DMPLEX` object
1632 
1633   Level: intermediate
1634 
1635 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1636 @*/
1637 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1638 {
1639   MPI_Comm               comm;
1640   PetscInt               dim, cdim, nroots;
1641   PetscSF                sfPoint;
1642   ISLocalToGlobalMapping ltogMigration;
1643   ISLocalToGlobalMapping ltogOriginal = NULL;
1644 
1645   PetscFunctionBegin;
1646   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1647   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1648   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1649   PetscCall(DMGetDimension(dm, &dim));
1650   PetscCall(DMSetDimension(targetDM, dim));
1651   PetscCall(DMGetCoordinateDim(dm, &cdim));
1652   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1653 
1654   /* Check for a one-to-all distribution pattern */
1655   PetscCall(DMGetPointSF(dm, &sfPoint));
1656   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1657   if (nroots >= 0) {
1658     IS        isOriginal;
1659     PetscInt  n, size, nleaves;
1660     PetscInt *numbering_orig, *numbering_new;
1661 
1662     /* Get the original point numbering */
1663     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1664     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1665     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1666     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1667     /* Convert to positive global numbers */
1668     for (n = 0; n < size; n++) {
1669       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1670     }
1671     /* Derive the new local-to-global mapping from the old one */
1672     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1673     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1674     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1675     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1676     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1677     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1678     PetscCall(ISDestroy(&isOriginal));
1679   } else {
1680     /* One-to-all distribution pattern: We can derive LToG from SF */
1681     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1682   }
1683   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1684   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1685   /* Migrate DM data to target DM */
1686   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1687   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1688   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1689   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1690   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1691   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1692   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1693   PetscFunctionReturn(PETSC_SUCCESS);
1694 }
1695 
1696 /*@
1697   DMPlexRemapMigrationSF - Rewrite the distribution SF to account for overlap
1698 
1699   Collective
1700 
1701   Input Parameters:
1702 + sfOverlap   - The `PetscSF` object just for the overlap
1703 - sfMigration - The original distribution `PetscSF` object
1704 
  Output Parameter:
. sfMigrationNew - A rewritten `PetscSF` object that incorporates the overlap
1707 
1708   Level: developer
1709 
1710 .seealso: `DMPLEX`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`, `DMPlexGetOverlap()`
1711 @*/
1712 PetscErrorCode DMPlexRemapMigrationSF(PetscSF sfOverlap, PetscSF sfMigration, PetscSF *sfMigrationNew)
1713 {
1714   PetscSFNode       *newRemote, *permRemote = NULL;
1715   const PetscInt    *oldLeaves;
1716   const PetscSFNode *oldRemote;
1717   PetscInt           nroots, nleaves, noldleaves;
1718 
1719   PetscFunctionBegin;
1720   PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
1721   PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
1722   PetscCall(PetscMalloc1(nleaves, &newRemote));
1723   /* oldRemote: original root point mapping to original leaf point
1724      newRemote: original leaf point mapping to overlapped leaf point */
1725   if (oldLeaves) {
1726     /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1727     PetscCall(PetscMalloc1(noldleaves, &permRemote));
1728     for (PetscInt l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1729     oldRemote = permRemote;
1730   }
1731   PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_SF_NODE, oldRemote, newRemote, MPI_REPLACE));
1732   PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_SF_NODE, oldRemote, newRemote, MPI_REPLACE));
1733   PetscCall(PetscFree(permRemote));
1734   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)sfOverlap), sfMigrationNew));
1735   PetscCall(PetscSFSetGraph(*sfMigrationNew, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
1736   PetscFunctionReturn(PETSC_SUCCESS);
1737 }
1738 
1739 /*@
1740   DMPlexDistribute - Distributes the mesh and any associated sections.
1741 
1742   Collective
1743 
1744   Input Parameters:
1745 + dm      - The original `DMPLEX` object
1746 - overlap - The overlap of partitions, 0 is the default
1747 
1748   Output Parameters:
1749 + sf         - The `PetscSF` used for point distribution, or `NULL` if not needed
1750 - dmParallel - The distributed `DMPLEX` object
1751 
1752   Level: intermediate
1753 
1754   Note:
1755   If the mesh was not distributed, the output `dmParallel` will be `NULL`.
1756 
1757   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
1758   representation on the mesh.
1759 
1760 .seealso: `DMPLEX`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1761 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PeOp PetscSF *sf, DM *dmParallel)
{
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfMigration, sfStratified, sfPoint;
  PetscBool        flg, balance;
  PetscMPIInt      rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscAssertPointer(sf, 3);
  PetscAssertPointer(dmParallel, 4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Uniprocessor: nothing to distribute; outputs stay NULL as documented above */
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
  {
    /* Convert partition to DMLabel */
    IS              is;
    PetscHSetI      ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: collect the set of target ranks that receive any points */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure() */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      /* Each rank's stratum is the closure of the cells assigned to it */
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));

  /* Invert the partition label (points per rank) into a migration label and build the migration SF */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, PETSC_TRUE, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM      dmOverlap;
    PetscSF sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(DMPlexRemapMigrationSF(sfOverlap, sfMigration, &sfOverlapPoint));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  // Create sfNatural, need discretization information
  PetscCall(DMCopyDisc(dm, *dmParallel));
  if (dm->localSection) {
    /* Carry the local section along with the migrated points */
    PetscSection psection;
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &psection));
    PetscCall(PetscSFDistributeSection(sfMigration, dm->localSection, NULL, psection));
    PetscCall(DMSetLocalSection(*dmParallel, psection));
    PetscCall(PetscSectionDestroy(&psection));
  }
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
    PetscCall(DMGetLocalSection(dm, &section));

    /* If DM has useNatural = PETSC_TRUE, but no sfNatural is set, the DM's Section is considered natural */
    /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
    /* Compose with a previous sfNatural if present */
    if (dm->sfNatural) {
      PetscCall(DMPlexMigrateGlobalToNaturalSF(dm, *dmParallel, dm->sfNatural, sfMigration, &(*dmParallel)->sfNatural));
    } else {
      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    }
    /* Compose with a previous sfMigration if present */
    if (dm->sfMigration) {
      PetscSF naturalPointSF;

      PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
      PetscCall(PetscSFDestroy(&sfMigration));
      sfMigration = naturalPointSF;
    }
  }
  /* Cleanup */
  if (sf) {
    /* The caller takes ownership of the migration SF */
    *sf = sfMigration;
  } else PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1925 
1926 PetscErrorCode DMPlexDistributeOverlap_Internal(DM dm, PetscInt overlap, MPI_Comm newcomm, const char *ovlboundary, PetscSF *sf, DM *dmOverlap)
1927 {
1928   DM_Plex     *mesh = (DM_Plex *)dm->data;
1929   MPI_Comm     comm;
1930   PetscMPIInt  size, rank;
1931   PetscSection rootSection, leafSection;
1932   IS           rootrank, leafrank;
1933   DM           dmCoord;
1934   DMLabel      lblOverlap;
1935   PetscSF      sfOverlap, sfStratified, sfPoint;
1936   PetscBool    clear_ovlboundary;
1937 
1938   PetscFunctionBegin;
1939   if (sf) *sf = NULL;
1940   *dmOverlap = NULL;
1941   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1942   PetscCallMPI(MPI_Comm_size(comm, &size));
1943   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1944   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1945   {
1946     // We need to get options for the _already_distributed mesh, so it must be done here
1947     PetscInt    overlap;
1948     const char *prefix;
1949     char        oldPrefix[PETSC_MAX_PATH_LEN];
1950 
1951     oldPrefix[0] = '\0';
1952     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1953     PetscCall(PetscStrncpy(oldPrefix, prefix, sizeof(oldPrefix)));
1954     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
1955     PetscCall(DMPlexGetOverlap(dm, &overlap));
1956     PetscObjectOptionsBegin((PetscObject)dm);
1957     PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
1958     PetscOptionsEnd();
1959     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
1960   }
1961   if (ovlboundary) {
1962     PetscBool flg;
1963     PetscCall(DMHasLabel(dm, ovlboundary, &flg));
1964     if (!flg) {
1965       DMLabel label;
1966 
1967       PetscCall(DMCreateLabel(dm, ovlboundary));
1968       PetscCall(DMGetLabel(dm, ovlboundary, &label));
1969       PetscCall(DMPlexMarkBoundaryFaces(dm, 1, label));
1970       clear_ovlboundary = PETSC_TRUE;
1971     }
1972   }
1973   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1974   /* Compute point overlap with neighbouring processes on the distributed DM */
1975   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1976   PetscCall(PetscSectionCreate(newcomm, &rootSection));
1977   PetscCall(PetscSectionCreate(newcomm, &leafSection));
1978   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
1979   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1980   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1981   /* Convert overlap label to stratified migration SF */
1982   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, PETSC_FALSE, &sfOverlap));
1983   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
1984   PetscCall(PetscSFDestroy(&sfOverlap));
1985   sfOverlap = sfStratified;
1986   PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
1987   PetscCall(PetscSFSetFromOptions(sfOverlap));
1988 
1989   PetscCall(PetscSectionDestroy(&rootSection));
1990   PetscCall(PetscSectionDestroy(&leafSection));
1991   PetscCall(ISDestroy(&rootrank));
1992   PetscCall(ISDestroy(&leafrank));
1993   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1994 
1995   /* Build the overlapping DM */
1996   PetscCall(DMPlexCreate(newcomm, dmOverlap));
1997   PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
1998   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
1999   /* Store the overlap in the new DM */
2000   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
2001   /* Build the new point SF */
2002   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
2003   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
2004   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
2005   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2006   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
2007   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2008   PetscCall(PetscSFDestroy(&sfPoint));
2009   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmOverlap));
2010   /* TODO: labels stored inside the DS for regions should be handled here */
2011   PetscCall(DMCopyDisc(dm, *dmOverlap));
2012   /* Cleanup overlap partition */
2013   PetscCall(DMLabelDestroy(&lblOverlap));
2014   if (sf) *sf = sfOverlap;
2015   else PetscCall(PetscSFDestroy(&sfOverlap));
2016   if (ovlboundary) {
2017     DMLabel   label;
2018     PetscBool flg;
2019 
2020     if (clear_ovlboundary) PetscCall(DMRemoveLabel(dm, ovlboundary, NULL));
2021     PetscCall(DMHasLabel(*dmOverlap, ovlboundary, &flg));
2022     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing %s label", ovlboundary);
2023     PetscCall(DMGetLabel(*dmOverlap, ovlboundary, &label));
2024     PetscCall(DMPlexMarkBoundaryFaces_Internal(*dmOverlap, 1, 0, label, PETSC_TRUE));
2025   }
2026   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2027   PetscFunctionReturn(PETSC_SUCCESS);
2028 }
2029 
2030 /*@
2031   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping `DM`.
2032 
2033   Collective
2034 
2035   Input Parameters:
2036 + dm      - The non-overlapping distributed `DMPLEX` object
2037 - overlap - The overlap of partitions (the same on all ranks)
2038 
2039   Output Parameters:
2040 + sf        - The `PetscSF` used for point distribution, or pass `NULL` if not needed
2041 - dmOverlap - The overlapping distributed `DMPLEX` object
2042 
2043   Options Database Keys:
2044 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
2045 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
2046 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
2047 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
2048 
2049   Level: advanced
2050 
2051   Notes:
  If the mesh was not distributed, the output `dmOverlap` will be `NULL`.
2053 
2054   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
2055   representation on the mesh.
2056 
2057 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
2058 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PeOp PetscSF *sf, DM *dmOverlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscAssertPointer(sf, 3);
  PetscAssertPointer(dmOverlap, 4);
  /* Delegate to the internal routine; NULL ovlboundary means no boundary-label maintenance */
  PetscCall(DMPlexDistributeOverlap_Internal(dm, overlap, PetscObjectComm((PetscObject)dm), NULL, sf, dmOverlap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2069 
2070 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2071 {
2072   DM_Plex *mesh = (DM_Plex *)dm->data;
2073 
2074   PetscFunctionBegin;
2075   *overlap = mesh->overlap;
2076   PetscFunctionReturn(PETSC_SUCCESS);
2077 }
2078 
2079 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2080 {
2081   DM_Plex *mesh    = NULL;
2082   DM_Plex *meshSrc = NULL;
2083 
2084   PetscFunctionBegin;
2085   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2086   mesh = (DM_Plex *)dm->data;
2087   if (dmSrc) {
2088     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2089     meshSrc       = (DM_Plex *)dmSrc->data;
2090     mesh->overlap = meshSrc->overlap;
2091   } else {
2092     mesh->overlap = 0;
2093   }
2094   mesh->overlap += overlap;
2095   PetscFunctionReturn(PETSC_SUCCESS);
2096 }
2097 
/*@
  DMPlexGetOverlap - Get the width of the cell overlap

  Not Collective

  Input Parameter:
. dm - The `DM`

  Output Parameter:
. overlap - the width of the cell overlap

  Level: intermediate

.seealso: `DMPLEX`, `DMPlexSetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(overlap, 2);
  /* PetscUseMethod() raises an error if the DM type does not provide "DMPlexGetOverlap_C" */
  PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2121 
/*@
  DMPlexSetOverlap - Set the width of the cell overlap

  Logically Collective

  Input Parameters:
+ dm      - The `DM`
. dmSrc   - The `DM` that produced this one, or `NULL`
- overlap - the width of the cell overlap

  Level: intermediate

  Note:
  The overlap from `dmSrc` is added to `dm`

.seealso: `DMPLEX`, `DMPlexGetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 3);
  /* PetscTryMethod() is a silent no-op if the DM type does not provide "DMPlexSetOverlap_C" */
  PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2147 
2148 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2149 {
2150   DM_Plex *mesh = (DM_Plex *)dm->data;
2151 
2152   PetscFunctionBegin;
2153   mesh->distDefault = dist;
2154   PetscFunctionReturn(PETSC_SUCCESS);
2155 }
2156 
/*@
  DMPlexDistributeSetDefault - Set flag indicating whether the `DM` should be distributed by default

  Logically Collective

  Input Parameters:
+ dm   - The `DM`
- dist - Flag for distribution

  Level: intermediate

.seealso: `DMPLEX`, `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveBool(dm, dist, 2);
  /* PetscTryMethod() is a silent no-op if the DM type does not provide "DMPlexDistributeSetDefault_C" */
  PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2178 
2179 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2180 {
2181   DM_Plex *mesh = (DM_Plex *)dm->data;
2182 
2183   PetscFunctionBegin;
2184   *dist = mesh->distDefault;
2185   PetscFunctionReturn(PETSC_SUCCESS);
2186 }
2187 
/*@
  DMPlexDistributeGetDefault - Get flag indicating whether the `DM` should be distributed by default

  Not Collective

  Input Parameter:
. dm - The `DM`

  Output Parameter:
. dist - Flag for distribution

  Level: intermediate

.seealso: `DMPLEX`, `DM`, `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(dist, 2);
  /* PetscUseMethod() raises an error if the DM type does not provide "DMPlexDistributeGetDefault_C" */
  PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2211 
2212 /*@
2213   DMPlexGetGatherDM - Get a copy of the `DMPLEX` that gathers all points on the
2214   root process of the original's communicator.
2215 
2216   Collective
2217 
2218   Input Parameter:
2219 . dm - the original `DMPLEX` object
2220 
2221   Output Parameters:
2222 + sf         - the `PetscSF` used for point distribution (optional)
2223 - gatherMesh - the gathered `DM` object, or `NULL`
2224 
2225   Level: intermediate
2226 
2227 .seealso: `DMPLEX`, `DM`, `PetscSF`, `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2228 @*/
2229 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, PeOp DM *gatherMesh)
2230 {
2231   MPI_Comm         comm;
2232   PetscMPIInt      size;
2233   PetscPartitioner oldPart, gatherPart;
2234 
2235   PetscFunctionBegin;
2236   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2237   PetscAssertPointer(gatherMesh, 3);
2238   *gatherMesh = NULL;
2239   if (sf) *sf = NULL;
2240   comm = PetscObjectComm((PetscObject)dm);
2241   PetscCallMPI(MPI_Comm_size(comm, &size));
2242   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2243   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2244   PetscCall(PetscObjectReference((PetscObject)oldPart));
2245   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2246   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2247   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2248   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2249 
2250   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2251   PetscCall(PetscPartitionerDestroy(&gatherPart));
2252   PetscCall(PetscPartitionerDestroy(&oldPart));
2253   PetscFunctionReturn(PETSC_SUCCESS);
2254 }
2255 
2256 /*@
2257   DMPlexGetRedundantDM - Get a copy of the `DMPLEX` that is completely copied on each process.
2258 
2259   Collective
2260 
2261   Input Parameter:
2262 . dm - the original `DMPLEX` object
2263 
2264   Output Parameters:
2265 + sf            - the `PetscSF` used for point distribution (optional)
2266 - redundantMesh - the redundant `DM` object, or `NULL`
2267 
2268   Level: intermediate
2269 
2270 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2271 @*/
2272 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, PeOp DM *redundantMesh)
2273 {
2274   MPI_Comm     comm;
2275   PetscMPIInt  size, rank;
2276   PetscInt     pStart, pEnd, p;
2277   PetscInt     numPoints = -1;
2278   PetscSF      migrationSF, sfPoint, gatherSF;
2279   DM           gatherDM, dmCoord;
2280   PetscSFNode *points;
2281 
2282   PetscFunctionBegin;
2283   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2284   PetscAssertPointer(redundantMesh, 3);
2285   *redundantMesh = NULL;
2286   comm           = PetscObjectComm((PetscObject)dm);
2287   PetscCallMPI(MPI_Comm_size(comm, &size));
2288   if (size == 1) {
2289     PetscCall(PetscObjectReference((PetscObject)dm));
2290     *redundantMesh = dm;
2291     if (sf) *sf = NULL;
2292     PetscFunctionReturn(PETSC_SUCCESS);
2293   }
2294   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2295   if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
2296   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2297   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2298   numPoints = pEnd - pStart;
2299   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2300   PetscCall(PetscMalloc1(numPoints, &points));
2301   PetscCall(PetscSFCreate(comm, &migrationSF));
2302   for (p = 0; p < numPoints; p++) {
2303     points[p].index = p;
2304     points[p].rank  = 0;
2305   }
2306   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2307   PetscCall(DMPlexCreate(comm, redundantMesh));
2308   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2309   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2310   /* This is to express that all point are in overlap */
2311   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_INT_MAX));
2312   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2313   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2314   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2315   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2316   PetscCall(PetscSFDestroy(&sfPoint));
2317   if (sf) {
2318     PetscSF tsf;
2319 
2320     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2321     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2322     PetscCall(PetscSFDestroy(&tsf));
2323   }
2324   PetscCall(PetscSFDestroy(&migrationSF));
2325   PetscCall(PetscSFDestroy(&gatherSF));
2326   PetscCall(DMDestroy(&gatherDM));
2327   PetscCall(DMCopyDisc(dm, *redundantMesh));
2328   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2329   PetscFunctionReturn(PETSC_SUCCESS);
2330 }
2331 
2332 /*@
2333   DMPlexIsDistributed - Find out whether this `DM` is distributed, i.e. more than one rank owns some points.
2334 
2335   Collective
2336 
2337   Input Parameter:
2338 . dm - The `DM` object
2339 
2340   Output Parameter:
2341 . distributed - Flag whether the `DM` is distributed
2342 
2343   Level: intermediate
2344 
2345   Notes:
2346   This currently finds out whether at least two ranks have any DAG points.
2347   This involves `MPI_Allreduce()` with one integer.
2348   The result is currently not stashed so every call to this routine involves this global communication.
2349 
2350 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2351 @*/
2352 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2353 {
2354   PetscInt    pStart, pEnd, count;
2355   MPI_Comm    comm;
2356   PetscMPIInt size;
2357 
2358   PetscFunctionBegin;
2359   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2360   PetscAssertPointer(distributed, 2);
2361   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2362   PetscCallMPI(MPI_Comm_size(comm, &size));
2363   if (size == 1) {
2364     *distributed = PETSC_FALSE;
2365     PetscFunctionReturn(PETSC_SUCCESS);
2366   }
2367   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2368   count = (pEnd - pStart) > 0 ? 1 : 0;
2369   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2370   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2371   PetscFunctionReturn(PETSC_SUCCESS);
2372 }
2373 
2374 /*@
2375   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2376 
2377   Input Parameters:
2378 + dm   - The `DM`
2379 - name - The name of the specific parallel distribution
2380 
2381   Level: developer
2382 
2383   Note:
2384   If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2385   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2386   this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2387   loads the parallel distribution stored in file under this name.
2388 
2389 .seealso: `DMPLEX`, `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2390 @*/
2391 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2392 {
2393   DM_Plex *mesh = (DM_Plex *)dm->data;
2394 
2395   PetscFunctionBegin;
2396   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2397   if (name) PetscAssertPointer(name, 2);
2398   PetscCall(PetscFree(mesh->distributionName));
2399   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2400   PetscFunctionReturn(PETSC_SUCCESS);
2401 }
2402 
2403 /*@
2404   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2405 
2406   Input Parameter:
2407 . dm - The `DM`
2408 
2409   Output Parameter:
2410 . name - The name of the specific parallel distribution
2411 
2412   Level: developer
2413 
2414   Note:
2415   If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2416   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2417   this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2418   loads the parallel distribution stored in file under this name.
2419 
2420 .seealso: `DMPLEX`, `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2421 @*/
2422 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2423 {
2424   DM_Plex *mesh = (DM_Plex *)dm->data;
2425 
2426   PetscFunctionBegin;
2427   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2428   PetscAssertPointer(name, 2);
2429   *name = mesh->distributionName;
2430   PetscFunctionReturn(PETSC_SUCCESS);
2431 }
2432