xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 4e797053c5b2a03fd7d9d53239503c964862b706) !
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm   - The DM object
9 . user - The user callback, may be `NULL` (to clear the callback)
10 - ctx  - context for callback evaluation, may be `NULL`
11 
12   Level: advanced
13 
14   Notes:
15   The caller of `DMPlexGetAdjacency()` may need to arrange that a large enough array is available for the adjacency.
16 
17   Any setting here overrides other configuration of `DMPLEX` adjacency determination.
18 
19 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm - The `DM` object
37 
38   Output Parameters:
39 + user - The callback
40 - ctx  - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(PETSC_SUCCESS);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm         - The `DM` object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMPLEX`, `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm - The `DM` object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPLEX`, `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscAssertPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(PETSC_SUCCESS);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(PETSC_SUCCESS);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(PETSC_SUCCESS);
177 }
178 
/* Core adjacency dispatcher: allocates the output array if needed (sized by a worst-case
   estimate), calls the user callback or one of the cone/support/transitive kernels, and
   optionally replaces anchored (constrained) points by their anchor points.
   NOTE(review): asiz is function-static, so the size of the most recent allocation is
   shared across all DMs and is not thread-safe; if *adj is passed in non-NULL with
   *adjSize < 0, a stale asiz from a previous call is used — confirm callers never do this. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz       = 0;    /* size of the last array allocated here (see NOTE above) */
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      /* Each constrained point may expand into up to maxAnchors anchor points */
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    PetscInt depth, maxC, maxS, maxP, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    maxP = maxS * maxC;
    /* Adjacency can be as large as supp(cl(cell)) or cl(supp(vertex)),
           supp(cell) + supp(maxC faces) + supp(maxC^2 edges) + supp(maxC^3 vertices)
         = 0 + maxS*maxC + maxS^2*maxC^2 + maxS^3*maxC^3
         = \sum^d_{i=0} (maxS*maxC)^i - 1
         = (maxS*maxC)^{d+1} - 1 / (maxS*maxC - 1) - 1
      We could improve this by getting the max by strata:
           supp[d](cell) + supp[d-1](maxC[d] faces) + supp[1](maxC[d]*maxC[d-1] edges) + supp[0](maxC[d]*maxC[d-1]*maxC[d-2] vertices)
         = 0 + maxS[d-1]*maxC[d] + maxS[1]*maxC[d]*maxC[d-1] + maxS[0]*maxC[d]*maxC[d-1]*maxC[d-2]
      and the same with S and C reversed
    */
    /* For dense meshes the geometric-series bound explodes; fall back to the chart size */
    if ((depth == 3 && maxP > 200) || (depth == 2 && maxP > 580)) asiz = pEnd - pStart;
    else asiz = (maxP > 1) ? ((PetscPowInt(maxP, depth + 1) - 1) / (maxP - 1)) : depth + 1;
    asiz *= maxAnchors;
    /* Adjacency can never exceed the total number of points */
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback, when set, overrides every built-in adjacency rule */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Remove each constrained point from the list and splice in its anchors (deduplicated) */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Shift the remaining entries left over the removed constrained point */
        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        for (s = 0; s < aDof; ++s) {
          /* Append anchor if not already present (comma-operator append on scan exhaustion).
             NOTE(review): the append happens before the capacity check below, so a full
             array is overrun by one entry before the error fires — same pattern as the
             kernels above; consider checking before writing. */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
269 
270 /*@
271   DMPlexGetAdjacency - Return all points adjacent to the given point
272 
273   Input Parameters:
274 + dm - The `DM` object
275 - p  - The point
276 
277   Input/Output Parameters:
278 + adjSize - The maximum size of `adj` if it is non-`NULL`, or `PETSC_DETERMINE`;
279             on output the number of adjacent points
280 - adj     - Either `NULL` so that the array is allocated, or an existing array with size `adjSize`;
281         on output contains the adjacent points
282 
283   Level: advanced
284 
285   Notes:
286   The user must `PetscFree()` the `adj` array if it was not passed in.
287 
288 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
289 @*/
290 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
291 {
292   PetscBool useCone, useClosure, useAnchors;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscAssertPointer(adjSize, 3);
297   PetscAssertPointer(adj, 4);
298   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
299   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
300   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
301   PetscFunctionReturn(PETSC_SUCCESS);
302 }
303 
304 /*@
305   DMPlexCreateTwoSidedProcessSF - Create an `PetscSF` which just has process connectivity
306 
307   Collective
308 
309   Input Parameters:
310 + dm              - The `DM`
311 . sfPoint         - The `PetscSF` which encodes point connectivity
312 . rootRankSection - to be documented
313 . rootRanks       - to be documented
314 . leafRankSection - to be documented
315 - leafRanks       - to be documented
316 
317   Output Parameters:
318 + processRanks - A list of process neighbors, or `NULL`
319 - sfProcess    - An `PetscSF` encoding the two-sided process connectivity, or `NULL`
320 
321   Level: developer
322 
323 .seealso: `DMPLEX`, `PetscSFCreate()`, `DMPlexCreateProcessSF()`
324 @*/
325 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
326 {
327   const PetscSFNode *remotePoints;
328   PetscInt          *localPointsNew;
329   PetscSFNode       *remotePointsNew;
330   const PetscInt    *nranks;
331   PetscInt          *ranksNew;
332   PetscBT            neighbors;
333   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
334   PetscMPIInt        size, proc, rank;
335 
336   PetscFunctionBegin;
337   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
338   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
339   if (processRanks) PetscAssertPointer(processRanks, 7);
340   if (sfProcess) PetscAssertPointer(sfProcess, 8);
341   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
342   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
343   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
344   PetscCall(PetscBTCreate(size, &neighbors));
345   PetscCall(PetscBTMemzero(size, neighbors));
346   /* Compute root-to-leaf process connectivity */
347   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
348   PetscCall(ISGetIndices(rootRanks, &nranks));
349   for (p = pStart; p < pEnd; ++p) {
350     PetscInt ndof, noff, n;
351 
352     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
353     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
354     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
355   }
356   PetscCall(ISRestoreIndices(rootRanks, &nranks));
357   /* Compute leaf-to-neighbor process connectivity */
358   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
359   PetscCall(ISGetIndices(leafRanks, &nranks));
360   for (p = pStart; p < pEnd; ++p) {
361     PetscInt ndof, noff, n;
362 
363     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
364     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
365     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
366   }
367   PetscCall(ISRestoreIndices(leafRanks, &nranks));
368   /* Compute leaf-to-root process connectivity */
369   for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
370   /* Calculate edges */
371   PetscCall(PetscBTClear(neighbors, rank));
372   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
373     if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
374   }
375   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
376   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
377   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
378   for (proc = 0, n = 0; proc < size; ++proc) {
379     if (PetscBTLookup(neighbors, proc)) {
380       ranksNew[n]              = proc;
381       localPointsNew[n]        = proc;
382       remotePointsNew[n].index = rank;
383       remotePointsNew[n].rank  = proc;
384       ++n;
385     }
386   }
387   PetscCall(PetscBTDestroy(&neighbors));
388   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
389   else PetscCall(PetscFree(ranksNew));
390   if (sfProcess) {
391     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
392     PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
393     PetscCall(PetscSFSetFromOptions(*sfProcess));
394     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
395   }
396   PetscFunctionReturn(PETSC_SUCCESS);
397 }
398 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective

  Input Parameter:
. dm - The `DM`

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  /* Degree computation is a split (begin/end) collective on the point SF */
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  /* Every local point advertises this process's rank; the gather concatenates the
     ranks of all leaves at each root, in rootSection's storage layout */
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* PETSC_OWN_POINTER: the IS takes ownership of remoterank */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(PETSC_SUCCESS);
}
452 
453 /*@C
454   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
455 
456   Collective
457 
458   Input Parameters:
459 + dm          - The `DM`
460 . levels      - Number of overlap levels
461 . rootSection - The number of leaves for a given root point
462 . rootrank    - The rank of each edge into the root point
463 . leafSection - The number of processes sharing a given leaf point
464 - leafrank    - The rank of each process sharing a leaf point
465 
466   Output Parameter:
467 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
468 
469   Level: developer
470 
471 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
472 @*/
473 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
474 {
475   MPI_Comm           comm;
476   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
477   PetscSF            sfPoint;
478   const PetscSFNode *remote;
479   const PetscInt    *local;
480   const PetscInt    *nrank, *rrank;
481   PetscInt          *adj = NULL;
482   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
483   PetscMPIInt        rank, size;
484   PetscBool          flg;
485 
486   PetscFunctionBegin;
487   *ovLabel = NULL;
488   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
489   PetscCallMPI(MPI_Comm_size(comm, &size));
490   PetscCallMPI(MPI_Comm_rank(comm, &rank));
491   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
492   PetscCall(DMGetPointSF(dm, &sfPoint));
493   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
494   if (!levels) {
495     /* Add owned points */
496     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
497     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
498     PetscFunctionReturn(PETSC_SUCCESS);
499   }
500   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
501   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
502   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
503   /* Handle leaves: shared with the root point */
504   for (l = 0; l < nleaves; ++l) {
505     PetscInt adjSize = PETSC_DETERMINE, a;
506 
507     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
508     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
509   }
510   PetscCall(ISGetIndices(rootrank, &rrank));
511   PetscCall(ISGetIndices(leafrank, &nrank));
512   /* Handle roots */
513   for (p = pStart; p < pEnd; ++p) {
514     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
515 
516     if ((p >= sStart) && (p < sEnd)) {
517       /* Some leaves share a root with other leaves on different processes */
518       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
519       if (neighbors) {
520         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
521         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
522         for (n = 0; n < neighbors; ++n) {
523           const PetscInt remoteRank = nrank[noff + n];
524 
525           if (remoteRank == rank) continue;
526           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
527         }
528       }
529     }
530     /* Roots are shared with leaves */
531     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
532     if (!neighbors) continue;
533     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
534     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
535     for (n = 0; n < neighbors; ++n) {
536       const PetscInt remoteRank = rrank[noff + n];
537 
538       if (remoteRank == rank) continue;
539       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
540     }
541   }
542   PetscCall(PetscFree(adj));
543   PetscCall(ISRestoreIndices(rootrank, &rrank));
544   PetscCall(ISRestoreIndices(leafrank, &nrank));
545   /* Add additional overlap levels */
546   for (l = 1; l < levels; l++) {
547     /* Propagate point donations over SF to capture remote connections */
548     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
549     /* Add next level of point donations to the label */
550     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
551   }
552   /* We require the closure in the overlap */
553   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
554   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
555   if (flg) {
556     PetscViewer viewer;
557     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
558     PetscCall(DMLabelView(ovAdjByRank, viewer));
559   }
560   /* Invert sender to receiver label */
561   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
562   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
563   /* Add owned points, except for shared local points */
564   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
565   for (l = 0; l < nleaves; ++l) {
566     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
567     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
568   }
569   /* Clean up */
570   PetscCall(DMLabelDestroy(&ovAdjByRank));
571   PetscFunctionReturn(PETSC_SUCCESS);
572 }
573 
574 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
575 {
576   PetscInt neighbors, el;
577 
578   PetscFunctionBegin;
579   PetscCall(PetscSectionGetDof(section, p, &neighbors));
580   if (neighbors) {
581     PetscInt   *adj     = NULL;
582     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
583     PetscMPIInt rank;
584 
585     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
586     PetscCall(PetscSectionGetOffset(section, p, &noff));
587     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
588     for (n = 0; n < neighbors; ++n) {
589       const PetscInt remoteRank = ranks[noff + n];
590 
591       if (remoteRank == rank) continue;
592       for (a = 0; a < adjSize; ++a) {
593         PetscBool insert = PETSC_TRUE;
594 
595         for (el = 0; el < numExLabels; ++el) {
596           PetscInt exVal;
597           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
598           if (exVal == exValue[el]) {
599             insert = PETSC_FALSE;
600             break;
601           }
602         }
603         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
604       }
605     }
606     PetscCall(PetscFree(adj));
607   }
608   PetscFunctionReturn(PETSC_SUCCESS);
609 }
610 
611 /*@C
612   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
613 
614   Collective
615 
616   Input Parameters:
617 + dm          - The `DM`
618 . numLabels   - The number of labels to draw candidate points from
619 . label       - An array of labels containing candidate points
620 . value       - An array of label values marking the candidate points
621 . numExLabels - The number of labels to use for exclusion
622 . exLabel     - An array of labels indicating points to be excluded, or `NULL`
623 . exValue     - An array of label values to be excluded, or `NULL`
624 . rootSection - The number of leaves for a given root point
625 . rootrank    - The rank of each edge into the root point
626 . leafSection - The number of processes sharing a given leaf point
627 - leafrank    - The rank of each process sharing a leaf point
628 
629   Output Parameter:
630 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
631 
632   Level: developer
633 
634   Note:
635   The candidate points are only added to the overlap if they are adjacent to a shared point
636 
637 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
638 @*/
639 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
640 {
641   MPI_Comm           comm;
642   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
643   PetscSF            sfPoint;
644   const PetscSFNode *remote;
645   const PetscInt    *local;
646   const PetscInt    *nrank, *rrank;
647   PetscInt          *adj = NULL;
648   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
649   PetscMPIInt        rank, size;
650   PetscBool          flg;
651 
652   PetscFunctionBegin;
653   *ovLabel = NULL;
654   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
655   PetscCallMPI(MPI_Comm_size(comm, &size));
656   PetscCallMPI(MPI_Comm_rank(comm, &rank));
657   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
658   PetscCall(DMGetPointSF(dm, &sfPoint));
659   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
660   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
661   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
662   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
663   PetscCall(ISGetIndices(rootrank, &rrank));
664   PetscCall(ISGetIndices(leafrank, &nrank));
665   for (l = 0; l < numLabels; ++l) {
666     IS              valIS;
667     const PetscInt *points;
668     PetscInt        n;
669 
670     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
671     if (!valIS) continue;
672     PetscCall(ISGetIndices(valIS, &points));
673     PetscCall(ISGetLocalSize(valIS, &n));
674     for (PetscInt i = 0; i < n; ++i) {
675       const PetscInt p = points[i];
676 
677       if ((p >= sStart) && (p < sEnd)) {
678         PetscInt loc, adjSize = PETSC_DETERMINE;
679 
680         /* Handle leaves: shared with the root point */
681         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
682         else loc = (p >= 0 && p < nleaves) ? p : -1;
683         if (loc >= 0) {
684           const PetscInt remoteRank = remote[loc].rank;
685 
686           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
687           for (PetscInt a = 0; a < adjSize; ++a) {
688             PetscBool insert = PETSC_TRUE;
689 
690             for (el = 0; el < numExLabels; ++el) {
691               PetscInt exVal;
692               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
693               if (exVal == exValue[el]) {
694                 insert = PETSC_FALSE;
695                 break;
696               }
697             }
698             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
699           }
700         }
701         /* Some leaves share a root with other leaves on different processes */
702         PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
703       }
704       /* Roots are shared with leaves */
705       PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
706     }
707     PetscCall(ISRestoreIndices(valIS, &points));
708     PetscCall(ISDestroy(&valIS));
709   }
710   PetscCall(PetscFree(adj));
711   PetscCall(ISRestoreIndices(rootrank, &rrank));
712   PetscCall(ISRestoreIndices(leafrank, &nrank));
713   /* We require the closure in the overlap */
714   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
715   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
716   if (flg) {
717     PetscViewer viewer;
718     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
719     PetscCall(DMLabelView(ovAdjByRank, viewer));
720   }
721   /* Invert sender to receiver label */
722   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
723   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
724   /* Add owned points, except for shared local points */
725   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
726   for (l = 0; l < nleaves; ++l) {
727     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
728     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
729   }
730   /* Clean up */
731   PetscCall(DMLabelDestroy(&ovAdjByRank));
732   PetscFunctionReturn(PETSC_SUCCESS);
733 }
734 
735 /*@
736   DMPlexCreateOverlapMigrationSF - Create a `PetscSF` describing the new mesh distribution to make the overlap described by the input `PetscSF`
737 
738   Collective
739 
740   Input Parameters:
741 + dm        - The `DM`
742 - overlapSF - The `PetscSF` mapping ghost points in overlap to owner points on other processes
743 
744   Output Parameter:
745 . migrationSF - A `PetscSF` that maps original points in old locations to points in new locations
746 
747   Level: developer
748 
749 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
750 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets:
     tag every local point with its depth, then pull the depth of each incoming overlap point */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift.
     The shifts place received points in the order cells (dim), vertices (0), faces (dim-1),
     then the remaining strata downward, mirroring the interleaving used elsewhere in this file */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    /* depthIdx[d] is the next free slot for an incoming point of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: existing local points plus received overlap points */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points (leaves of the point SF must
     reference their true owner, not this rank) */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appending each after the local points of its depth */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  /* ilocal and iremote are handed off to the SF (PETSC_OWN_POINTER) */
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
844 
845 /*@
846   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
847 
848   Input Parameters:
849 + dm - The DM
850 - sf - A star forest with non-ordered leaves, usually defining a DM point migration
851 
852   Output Parameter:
853 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
854 
855   Level: developer
856 
857   Note:
858   This lexicographically sorts by (depth, cellType)
859 
860 .seealso: `DMPLEX`, `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
861 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* A rank with an empty local mesh may report ldepth < 0; every other rank must agree with the global depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets.
     Each local point's (depth, cellType) pair is packed into a PetscSFNode
     (index = depth, rank = cell type -- NOT an MPI rank) and pulled onto the leaves of sf */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    /* A negative cell type means only the depth was supplied for this point */
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST, DM_POLYTOPE_INTERIOR_GHOST and DM_POLYTOPE_UNKNOWN_CELL as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i]; /* shadows the outer dim: the dimension handled on this pass */

      /* Assign offsets first to every cell type of this dimension, then to the depth-only points */
      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST || c == DM_POLYTOPE_UNKNOWN_CELL))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Finally assign offsets to cell types whose dimension falls outside [0, dim] and that were not treated as cells above */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST && c != DM_POLYTOPE_UNKNOWN_CELL)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices.
     NOTE(review): this indexes ctShift by remoteDepths[p].rank, which presumably is always a valid
     cell type here (the depth-only fallback above is not consulted) -- confirm upstream guarantees */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const DMPolytopeType ct = (DMPolytopeType)remoteDepths[p].rank;

    ilocal[p] = ctShift[ct] + ctIdx[ct];
    ++ctIdx[ct];
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  /* ilocal ownership transfers to the SF; iremote is copied since it still belongs to sf */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
963 
964 /*@
965   DMPlexDistributeField - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
966 
967   Collective
968 
969   Input Parameters:
970 + dm              - The `DMPLEX` object
971 . pointSF         - The `PetscSF` describing the communication pattern
972 . originalSection - The `PetscSection` for existing data layout
973 - originalVec     - The existing data in a local vector
974 
975   Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
977 - newVec     - The new data in a local vector
978 
979   Level: developer
980 
981 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
982 @*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF      fieldSF;
  PetscInt    *remoteOffsets, fieldSize;
  PetscScalar *originalValues, *newValues;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
  /* Migrate the section layout; remoteOffsets records where each point's data starts on the sender */
  PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));

  /* Size the output vector from the migrated section */
  PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
  PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
  PetscCall(VecSetType(newVec, dm->vectype));

  /* Build a dof-level SF from the point-level one and broadcast the values;
     arrays must stay checked out across both Bcast phases */
  PetscCall(VecGetArray(originalVec, &originalValues));
  PetscCall(VecGetArray(newVec, &newValues));
  PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&fieldSF));
  PetscCall(VecRestoreArray(newVec, &newValues));
  PetscCall(VecRestoreArray(originalVec, &originalValues));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1009 
1010 /*@
1011   DMPlexDistributeFieldIS - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1012 
1013   Collective
1014 
1015   Input Parameters:
1016 + dm              - The `DMPLEX` object
1017 . pointSF         - The `PetscSF` describing the communication pattern
1018 . originalSection - The `PetscSection` for existing data layout
1019 - originalIS      - The existing data
1020 
1021   Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
1023 - newIS      - The new data
1024 
1025   Level: developer
1026 
1027 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1028 @*/
1029 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1030 {
1031   PetscSF         fieldSF;
1032   PetscInt       *newValues, *remoteOffsets, fieldSize;
1033   const PetscInt *originalValues;
1034 
1035   PetscFunctionBegin;
1036   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1037   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1038 
1039   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1040   PetscCall(PetscMalloc1(fieldSize, &newValues));
1041 
1042   PetscCall(ISGetIndices(originalIS, &originalValues));
1043   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1044   PetscCall(PetscFree(remoteOffsets));
1045   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1046   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1047   PetscCall(PetscSFDestroy(&fieldSF));
1048   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1049   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1050   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1051   PetscFunctionReturn(PETSC_SUCCESS);
1052 }
1053 
1054 /*@
1055   DMPlexDistributeData - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1056 
1057   Collective
1058 
1059   Input Parameters:
1060 + dm              - The `DMPLEX` object
1061 . pointSF         - The `PetscSF` describing the communication pattern
1062 . originalSection - The `PetscSection` for existing data layout
1063 . datatype        - The type of data
1064 - originalData    - The existing data
1065 
1066   Output Parameters:
1067 + newSection - The `PetscSection` describing the new data layout
1068 - newData    - The new data
1069 
1070   Level: developer
1071 
1072 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`
1073 @*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF     fieldSF;
  PetscInt   *remoteOffsets, fieldSize;
  PetscMPIInt dataSize;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
  /* Migrate the section layout; remoteOffsets records sender-side data offsets */
  PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));

  /* Allocate raw storage: (dofs in new layout) x (bytes per MPI datatype element).
     *newData is allocated here and not freed; the caller takes ownership */
  PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
  PetscCallMPI(MPI_Type_size(datatype, &dataSize));
  PetscCall(PetscMalloc(fieldSize * dataSize, newData));

  /* Build a dof-level SF and broadcast the data in the caller's datatype */
  PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&fieldSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1096 
/* Migrate cone sizes, cone points, and cone orientations from dm to dmParallel along
   migrationSF, renumbering cone points into the new local numbering, then symmetrize
   (build supports) and stratify the parallel mesh and copy its adjacency settings. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate cone points through the supplied local-to-global mapping before sending */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    globCones = cones; /* no translation requested; send the cones as-is */
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received points into the new local numbering in place; unmapped points become negative (IS_GTOLM_MASK) */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* In debug builds, verify every cone point was covered by the migration */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations are not point indices, so they are broadcast without renumbering
     (cones/newCones are reused here as scratch pointers for the orientation arrays) */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    /* The support section must share the cone section's chart before symmetrizing */
    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Carry the adjacency configuration over to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1179 
/* Migrate the coordinate field (and the cellwise coordinate field, if present) to
   dmParallel along migrationSF, and copy the periodicity description. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *Lstart, *L;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Copy the coordinate discretization, then migrate the local coordinate vector */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetCoordinateDM(dm, &cdm));
  PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
  PetscCall(DMCopyDisc(cdm, cdmParallel));
  PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
  PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
  PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
  if (originalCoordinates) {
    /* Local coordinates are serial data, hence PETSC_COMM_SELF */
    PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
    PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
    PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

    PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
    PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
    PetscCall(VecGetBlockSize(originalCoordinates, &bs));
    PetscCall(VecSetBlockSize(newCoordinates, bs));
    PetscCall(VecDestroy(&newCoordinates)); /* dmParallel holds its own reference */
  }

  /* Carry over periodicity information */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
  /* Migrate the cellwise coordinate field, if the mesh has one */
  PetscCall(DMGetCellCoordinateDM(dm, &cdm));
  if (cdm) {
    PetscCall(DMClone(dmParallel, &cdmParallel));
    PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
    PetscCall(DMCopyDisc(cdm, cdmParallel));
    PetscCall(DMDestroy(&cdmParallel)); /* dmParallel holds its own reference */
    PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
    PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
    PetscCall(PetscSectionCreate(comm, &newCoordSection));
    if (originalCoordinates) {
      PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
      PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
      PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

      PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
      PetscCall(VecGetBlockSize(originalCoordinates, &bs));
      PetscCall(VecSetBlockSize(newCoordinates, bs));
      PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
      PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
      PetscCall(VecDestroy(&newCoordinates));
    }
    PetscCall(PetscSectionDestroy(&newCoordSection));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1241 
/* Migrate all labels from dm to dmParallel along migrationSF. The "depth" label is
   normally recreated by stratification and skipped, unless the user has modified it,
   in which case it is distributed like any other label. Collective: all ranks must
   iterate the loop the same number of times, hence the Bcasts/Allreduces inside. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead.
     The recorded state in mesh->depthState detects such modification. */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Drop the automatically created depth label so the distributed one can replace it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    } else {
      isDepth = PETSC_FALSE;
    }
    /* Rank 0's view of which label is "depth" is authoritative */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      /* Ranks with an empty mesh have depth < 0; the rest must match the global depth */
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1322 
/* Migrate the reference tree and the parent/child-ID tree data from dm to dmParallel
   along migrationSF, renumbering parent points into the new local numbering
   (same protocol as DMPlexDistributeCones for cone points). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    /* Migrate the parent section layout and build a dof-level SF for the tree data */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      /* Translate parent points through the supplied local-to-global mapping before sending */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      globParents = origParents; /* no translation requested; send the parents as-is */
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    /* Child IDs are not point indices, so they need no renumbering */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received parents into the new local numbering in place; unmapped points become negative (IS_GTOLM_MASK) */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* In debug builds, verify every parent point was covered by the migration */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1398 
1399 /*@
1400   DMPlexSetPartitionBalance - Should distribution of the `DM` attempt to balance the shared point partition?
1401 
1402   Input Parameters:
1403 + dm  - The `DMPLEX` object
1404 - flg - Balance the partition?
1405 
1406   Level: intermediate
1407 
1408 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1409 @*/
1410 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1411 {
1412   DM_Plex *mesh = (DM_Plex *)dm->data;
1413 
1414   PetscFunctionBegin;
1415   mesh->partitionBalance = flg;
1416   PetscFunctionReturn(PETSC_SUCCESS);
1417 }
1418 
1419 /*@
1420   DMPlexGetPartitionBalance - Does distribution of the `DM` attempt to balance the shared point partition?
1421 
1422   Input Parameter:
1423 . dm - The `DMPLEX` object
1424 
1425   Output Parameter:
1426 . flg - Balance the partition?
1427 
1428   Level: intermediate
1429 
1430 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1431 @*/
1432 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1433 {
1434   DM_Plex *mesh = (DM_Plex *)dm->data;
1435 
1436   PetscFunctionBegin;
1437   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1438   PetscAssertPointer(flg, 2);
1439   *flg = mesh->partitionBalance;
1440   PetscFunctionReturn(PETSC_SUCCESS);
1441 }
1442 
/* Ownership-vote triple used by DMPlexCreatePointSF(): the vote value being maximized,
   the bidding rank, and that rank's local index for the point. Reduced with MaxLocCarry(). */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1446 
1447 /* MaxLoc, but carry a third piece of information around */
1448 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1449 {
1450   Petsc3Int *a = (Petsc3Int *)inout_;
1451   Petsc3Int *b = (Petsc3Int *)in_;
1452   PetscInt   i, len = *len_;
1453   for (i = 0; i < len; i++) {
1454     if (a[i].vote < b[i].vote) {
1455       a[i].vote  = b[i].vote;
1456       a[i].rank  = b[i].rank;
1457       a[i].index = b[i].index;
1458     } else if (a[i].vote <= b[i].vote) {
1459       if (a[i].rank >= b[i].rank) {
1460         a[i].rank  = b[i].rank;
1461         a[i].index = b[i].index;
1462       }
1463     }
1464   }
1465 }
1466 
1467 /*@
1468   DMPlexCreatePointSF - Build a point `PetscSF` from an `PetscSF` describing a point migration
1469 
1470   Input Parameters:
1471 + dm          - The source `DMPLEX` object
1472 . migrationSF - The star forest that describes the parallel point remapping
1473 - ownership   - Flag causing a vote to determine point ownership
1474 
1475   Output Parameter:
1476 . pointSF - The star forest describing the point overlap in the remapped `DM`
1477 
1478   Level: developer
1479 
1480   Note:
1481   Output `pointSF` is guaranteed to have the array of local indices (leaves) sorted.
1482 
1483 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1484 @*/
1485 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1486 {
1487   PetscMPIInt        rank, size;
1488   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1489   PetscInt          *pointLocal;
1490   const PetscInt    *leaves;
1491   const PetscSFNode *roots;
1492   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1493   Vec                shifts;
1494   const PetscInt     numShifts  = 13759;
1495   const PetscScalar *shift      = NULL;
1496   const PetscBool    shiftDebug = PETSC_FALSE;
1497   PetscBool          balance;
1498 
1499   PetscFunctionBegin;
1500   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1501   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1502   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1503   PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1504   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1505   PetscCall(PetscSFSetFromOptions(*pointSF));
1506   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1507 
1508   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1509   PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1510   PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1511   if (ownership) {
1512     MPI_Op       op;
1513     MPI_Datatype datatype;
1514     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1515     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1516     if (balance) {
1517       PetscRandom r;
1518 
1519       PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1520       PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1521       PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1522       PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1523       PetscCall(VecSetType(shifts, VECSTANDARD));
1524       PetscCall(VecSetRandom(shifts, r));
1525       PetscCall(PetscRandomDestroy(&r));
1526       PetscCall(VecGetArrayRead(shifts, &shift));
1527     }
1528 
1529     PetscCall(PetscMalloc1(nroots, &rootVote));
1530     PetscCall(PetscMalloc1(nleaves, &leafVote));
1531     /* Point ownership vote: Process with highest rank owns shared points */
1532     for (p = 0; p < nleaves; ++p) {
1533       if (shiftDebug) {
1534         PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1535                                           (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1536       }
1537       /* Either put in a bid or we know we own it */
1538       leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1539       leafVote[p].rank  = rank;
1540       leafVote[p].index = p;
1541     }
1542     for (p = 0; p < nroots; p++) {
1543       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1544       rootVote[p].vote  = -3;
1545       rootVote[p].rank  = -3;
1546       rootVote[p].index = -3;
1547     }
1548     PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1549     PetscCallMPI(MPI_Type_commit(&datatype));
1550     PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1551     PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1552     PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1553     PetscCallMPI(MPI_Op_free(&op));
1554     PetscCallMPI(MPI_Type_free(&datatype));
1555     for (p = 0; p < nroots; p++) {
1556       rootNodes[p].rank  = rootVote[p].rank;
1557       rootNodes[p].index = rootVote[p].index;
1558     }
1559     PetscCall(PetscFree(leafVote));
1560     PetscCall(PetscFree(rootVote));
1561   } else {
1562     for (p = 0; p < nroots; p++) {
1563       rootNodes[p].index = -1;
1564       rootNodes[p].rank  = rank;
1565     }
1566     for (p = 0; p < nleaves; p++) {
1567       /* Write new local id into old location */
1568       if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1569     }
1570   }
1571   PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1572   PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
1573 
1574   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1575     if (leafNodes[p].rank != rank) npointLeaves++;
1576   }
1577   PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1578   PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1579   for (idx = 0, p = 0; p < nleaves; p++) {
1580     if (leafNodes[p].rank != rank) {
1581       /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1582       pointLocal[idx]  = p;
1583       pointRemote[idx] = leafNodes[p];
1584       idx++;
1585     }
1586   }
1587   if (shift) {
1588     PetscCall(VecRestoreArrayRead(shifts, &shift));
1589     PetscCall(VecDestroy(&shifts));
1590   }
1591   if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1592   PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1593   PetscCall(PetscFree2(rootNodes, leafNodes));
1594   PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1595   if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
1596   PetscFunctionReturn(PETSC_SUCCESS);
1597 }
1598 
1599 /*@
1600   DMPlexMigrate  - Migrates internal `DM` data over the supplied star forest
1601 
1602   Collective
1603 
1604   Input Parameters:
1605 + dm - The source `DMPLEX` object
1606 - sf - The star forest communication context describing the migration pattern
1607 
1608   Output Parameter:
1609 . targetDM - The target `DMPLEX` object
1610 
1611   Level: intermediate
1612 
1613 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1614 @*/
1615 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1616 {
1617   MPI_Comm               comm;
1618   PetscInt               dim, cdim, nroots;
1619   PetscSF                sfPoint;
1620   ISLocalToGlobalMapping ltogMigration;
1621   ISLocalToGlobalMapping ltogOriginal = NULL;
1622 
1623   PetscFunctionBegin;
1624   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1625   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1626   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1627   PetscCall(DMGetDimension(dm, &dim));
1628   PetscCall(DMSetDimension(targetDM, dim));
1629   PetscCall(DMGetCoordinateDim(dm, &cdim));
1630   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1631 
1632   /* Check for a one-to-all distribution pattern */
1633   PetscCall(DMGetPointSF(dm, &sfPoint));
1634   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1635   if (nroots >= 0) {
1636     IS        isOriginal;
1637     PetscInt  n, size, nleaves;
1638     PetscInt *numbering_orig, *numbering_new;
1639 
1640     /* Get the original point numbering */
1641     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1642     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1643     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1644     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1645     /* Convert to positive global numbers */
1646     for (n = 0; n < size; n++) {
1647       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1648     }
1649     /* Derive the new local-to-global mapping from the old one */
1650     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1651     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1652     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1653     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1654     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1655     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1656     PetscCall(ISDestroy(&isOriginal));
1657   } else {
1658     /* One-to-all distribution pattern: We can derive LToG from SF */
1659     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1660   }
1661   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1662   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1663   /* Migrate DM data to target DM */
1664   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1665   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1666   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1667   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1668   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1669   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1670   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1671   PetscFunctionReturn(PETSC_SUCCESS);
1672 }
1673 
1674 /*@
1675   DMPlexRemapMigrationSF - Rewrite the distribution SF to account for overlap
1676 
1677   Collective
1678 
1679   Input Parameters:
1680 + sfOverlap   - The `PetscSF` object just for the overlap
1681 - sfMigration - The original distribution `PetscSF` object
1682 
1683   Output Parameters:
1684 . sfMigrationNew - A rewritten `PetscSF` object that incorporates the overlap
1685 
1686   Level: developer
1687 
1688 .seealso: `DMPLEX`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`, `DMPlexGetOverlap()`
1689 @*/
1690 PetscErrorCode DMPlexRemapMigrationSF(PetscSF sfOverlap, PetscSF sfMigration, PetscSF *sfMigrationNew)
1691 {
1692   PetscSFNode       *newRemote, *permRemote;
1693   const PetscInt    *oldLeaves;
1694   const PetscSFNode *oldRemote;
1695   PetscInt           nroots, nleaves, noldleaves;
1696 
1697   PetscFunctionBegin;
1698   PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
1699   PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
1700   PetscCall(PetscMalloc1(nleaves, &newRemote));
1701   /* oldRemote: original root point mapping to original leaf point
1702      newRemote: original leaf point mapping to overlapped leaf point */
1703   if (oldLeaves) {
1704     /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1705     PetscCall(PetscMalloc1(noldleaves, &permRemote));
1706     for (PetscInt l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1707     oldRemote = permRemote;
1708   }
1709   PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1710   PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
1711   if (oldLeaves) PetscCall(PetscFree(oldRemote));
1712   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)sfOverlap), sfMigrationNew));
1713   PetscCall(PetscSFSetGraph(*sfMigrationNew, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
1714   PetscFunctionReturn(PETSC_SUCCESS);
1715 }
1716 
1717 /*@
1718   DMPlexDistribute - Distributes the mesh and any associated sections.
1719 
1720   Collective
1721 
1722   Input Parameters:
1723 + dm      - The original `DMPLEX` object
1724 - overlap - The overlap of partitions, 0 is the default
1725 
1726   Output Parameters:
1727 + sf         - The `PetscSF` used for point distribution, or `NULL` if not needed
1728 - dmParallel - The distributed `DMPLEX` object
1729 
1730   Level: intermediate
1731 
1732   Note:
1733   If the mesh was not distributed, the output `dmParallel` will be `NULL`.
1734 
1735   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
1736   representation on the mesh.
1737 
1738 .seealso: `DMPLEX`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1739 @*/
1740 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1741 {
1742   MPI_Comm         comm;
1743   PetscPartitioner partitioner;
1744   IS               cellPart;
1745   PetscSection     cellPartSection;
1746   DM               dmCoord;
1747   DMLabel          lblPartition, lblMigration;
1748   PetscSF          sfMigration, sfStratified, sfPoint;
1749   PetscBool        flg, balance;
1750   PetscMPIInt      rank, size;
1751 
1752   PetscFunctionBegin;
1753   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1754   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1755   if (sf) PetscAssertPointer(sf, 3);
1756   PetscAssertPointer(dmParallel, 4);
1757 
1758   if (sf) *sf = NULL;
1759   *dmParallel = NULL;
1760   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1761   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1762   PetscCallMPI(MPI_Comm_size(comm, &size));
1763   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1764 
1765   PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
1766   /* Create cell partition */
1767   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1768   PetscCall(PetscSectionCreate(comm, &cellPartSection));
1769   PetscCall(DMPlexGetPartitioner(dm, &partitioner));
1770   PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
1771   PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
1772   {
1773     /* Convert partition to DMLabel */
1774     IS              is;
1775     PetscHSetI      ht;
1776     const PetscInt *points;
1777     PetscInt       *iranks;
1778     PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;
1779 
1780     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
1781     /* Preallocate strata */
1782     PetscCall(PetscHSetICreate(&ht));
1783     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1784     for (proc = pStart; proc < pEnd; proc++) {
1785       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1786       if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
1787     }
1788     PetscCall(PetscHSetIGetSize(ht, &nranks));
1789     PetscCall(PetscMalloc1(nranks, &iranks));
1790     PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
1791     PetscCall(PetscHSetIDestroy(&ht));
1792     PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
1793     PetscCall(PetscFree(iranks));
1794     /* Inline DMPlexPartitionLabelClosure() */
1795     PetscCall(ISGetIndices(cellPart, &points));
1796     PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1797     for (proc = pStart; proc < pEnd; proc++) {
1798       PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1799       if (!npoints) continue;
1800       PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
1801       PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
1802       PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
1803       PetscCall(ISDestroy(&is));
1804     }
1805     PetscCall(ISRestoreIndices(cellPart, &points));
1806   }
1807   PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));
1808 
1809   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
1810   PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
1811   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, PETSC_TRUE, &sfMigration));
1812   PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
1813   PetscCall(PetscSFDestroy(&sfMigration));
1814   sfMigration = sfStratified;
1815   PetscCall(PetscSFSetUp(sfMigration));
1816   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1817   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
1818   if (flg) {
1819     PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
1820     PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
1821   }
1822 
1823   /* Create non-overlapping parallel DM and migrate internal data */
1824   PetscCall(DMPlexCreate(comm, dmParallel));
1825   PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
1826   PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));
1827 
1828   /* Build the point SF without overlap */
1829   PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1830   PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
1831   PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
1832   PetscCall(DMSetPointSF(*dmParallel, sfPoint));
1833   PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
1834   PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
1835   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1836   if (flg) PetscCall(PetscSFView(sfPoint, NULL));
1837 
1838   if (overlap > 0) {
1839     DM      dmOverlap;
1840     PetscSF sfOverlap, sfOverlapPoint;
1841 
1842     /* Add the partition overlap to the distributed DM */
1843     PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
1844     PetscCall(DMDestroy(dmParallel));
1845     *dmParallel = dmOverlap;
1846     if (flg) {
1847       PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
1848       PetscCall(PetscSFView(sfOverlap, NULL));
1849     }
1850 
1851     /* Re-map the migration SF to establish the full migration pattern */
1852     PetscCall(DMPlexRemapMigrationSF(sfOverlap, sfMigration, &sfOverlapPoint));
1853     PetscCall(PetscSFDestroy(&sfOverlap));
1854     PetscCall(PetscSFDestroy(&sfMigration));
1855     sfMigration = sfOverlapPoint;
1856   }
1857   /* Cleanup Partition */
1858   PetscCall(DMLabelDestroy(&lblPartition));
1859   PetscCall(DMLabelDestroy(&lblMigration));
1860   PetscCall(PetscSectionDestroy(&cellPartSection));
1861   PetscCall(ISDestroy(&cellPart));
1862   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
1863   // Create sfNatural, need discretization information
1864   PetscCall(DMCopyDisc(dm, *dmParallel));
1865   if (dm->useNatural) {
1866     PetscSection section;
1867 
1868     PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
1869     PetscCall(DMGetLocalSection(dm, &section));
1870 
1871     /* First DM with useNatural = PETSC_TRUE is considered natural */
1872     /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
1873     /* Compose with a previous sfNatural if present */
1874     if (dm->sfNatural) {
1875       PetscSF natSF;
1876 
1877       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
1878       PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
1879       PetscCall(PetscSFDestroy(&natSF));
1880     } else {
1881       PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
1882     }
1883     /* Compose with a previous sfMigration if present */
1884     if (dm->sfMigration) {
1885       PetscSF naturalPointSF;
1886 
1887       PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
1888       PetscCall(PetscSFDestroy(&sfMigration));
1889       sfMigration = naturalPointSF;
1890     }
1891   }
1892   /* Cleanup */
1893   if (sf) {
1894     *sf = sfMigration;
1895   } else PetscCall(PetscSFDestroy(&sfMigration));
1896   PetscCall(PetscSFDestroy(&sfPoint));
1897   PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
1898   PetscFunctionReturn(PETSC_SUCCESS);
1899 }
1900 
1901 PetscErrorCode DMPlexDistributeOverlap_Internal(DM dm, PetscInt overlap, MPI_Comm newcomm, const char *ovlboundary, PetscSF *sf, DM *dmOverlap)
1902 {
1903   DM_Plex     *mesh = (DM_Plex *)dm->data;
1904   MPI_Comm     comm;
1905   PetscMPIInt  size, rank;
1906   PetscSection rootSection, leafSection;
1907   IS           rootrank, leafrank;
1908   DM           dmCoord;
1909   DMLabel      lblOverlap;
1910   PetscSF      sfOverlap, sfStratified, sfPoint;
1911   PetscBool    clear_ovlboundary;
1912 
1913   PetscFunctionBegin;
1914   if (sf) *sf = NULL;
1915   *dmOverlap = NULL;
1916   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1917   PetscCallMPI(MPI_Comm_size(comm, &size));
1918   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1919   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1920   {
1921     // We need to get options for the _already_distributed mesh, so it must be done here
1922     PetscInt    overlap;
1923     const char *prefix;
1924     char        oldPrefix[PETSC_MAX_PATH_LEN];
1925 
1926     oldPrefix[0] = '\0';
1927     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1928     PetscCall(PetscStrncpy(oldPrefix, prefix, sizeof(oldPrefix)));
1929     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
1930     PetscCall(DMPlexGetOverlap(dm, &overlap));
1931     PetscObjectOptionsBegin((PetscObject)dm);
1932     PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
1933     PetscOptionsEnd();
1934     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
1935   }
1936   if (ovlboundary) {
1937     PetscBool flg;
1938     PetscCall(DMHasLabel(dm, ovlboundary, &flg));
1939     if (!flg) {
1940       DMLabel label;
1941 
1942       PetscCall(DMCreateLabel(dm, ovlboundary));
1943       PetscCall(DMGetLabel(dm, ovlboundary, &label));
1944       PetscCall(DMPlexMarkBoundaryFaces(dm, 1, label));
1945       clear_ovlboundary = PETSC_TRUE;
1946     }
1947   }
1948   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1949   /* Compute point overlap with neighbouring processes on the distributed DM */
1950   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1951   PetscCall(PetscSectionCreate(newcomm, &rootSection));
1952   PetscCall(PetscSectionCreate(newcomm, &leafSection));
1953   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
1954   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1955   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1956   /* Convert overlap label to stratified migration SF */
1957   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, PETSC_FALSE, &sfOverlap));
1958   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
1959   PetscCall(PetscSFDestroy(&sfOverlap));
1960   sfOverlap = sfStratified;
1961   PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
1962   PetscCall(PetscSFSetFromOptions(sfOverlap));
1963 
1964   PetscCall(PetscSectionDestroy(&rootSection));
1965   PetscCall(PetscSectionDestroy(&leafSection));
1966   PetscCall(ISDestroy(&rootrank));
1967   PetscCall(ISDestroy(&leafrank));
1968   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1969 
1970   /* Build the overlapping DM */
1971   PetscCall(DMPlexCreate(newcomm, dmOverlap));
1972   PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
1973   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
1974   /* Store the overlap in the new DM */
1975   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
1976   /* Build the new point SF */
1977   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
1978   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
1979   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
1980   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1981   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
1982   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1983   PetscCall(PetscSFDestroy(&sfPoint));
1984   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmOverlap));
1985   /* TODO: labels stored inside the DS for regions should be handled here */
1986   PetscCall(DMCopyDisc(dm, *dmOverlap));
1987   /* Cleanup overlap partition */
1988   PetscCall(DMLabelDestroy(&lblOverlap));
1989   if (sf) *sf = sfOverlap;
1990   else PetscCall(PetscSFDestroy(&sfOverlap));
1991   if (ovlboundary) {
1992     DMLabel   label;
1993     PetscBool flg;
1994 
1995     if (clear_ovlboundary) PetscCall(DMRemoveLabel(dm, ovlboundary, NULL));
1996     PetscCall(DMHasLabel(*dmOverlap, ovlboundary, &flg));
1997     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing %s label", ovlboundary);
1998     PetscCall(DMGetLabel(*dmOverlap, ovlboundary, &label));
1999     PetscCall(DMPlexMarkBoundaryFaces_Internal(*dmOverlap, 1, 0, label, PETSC_TRUE));
2000   }
2001   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2002   PetscFunctionReturn(PETSC_SUCCESS);
2003 }
2004 
2005 /*@
2006   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping `DM`.
2007 
2008   Collective
2009 
2010   Input Parameters:
2011 + dm      - The non-overlapping distributed `DMPLEX` object
2012 - overlap - The overlap of partitions (the same on all ranks)
2013 
2014   Output Parameters:
2015 + sf        - The `PetscSF` used for point distribution, or pass `NULL` if not needed
2016 - dmOverlap - The overlapping distributed `DMPLEX` object
2017 
2018   Options Database Keys:
2019 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
2020 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
2021 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
2022 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
2023 
2024   Level: advanced
2025 
2026   Notes:
2027   If the mesh was not distributed, the return value is `NULL`.
2028 
2029   The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
2030   representation on the mesh.
2031 
2032 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
2033 @*/
2034 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
2035 {
2036   PetscFunctionBegin;
2037   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2038   PetscValidLogicalCollectiveInt(dm, overlap, 2);
2039   if (sf) PetscAssertPointer(sf, 3);
2040   PetscAssertPointer(dmOverlap, 4);
2041   PetscCall(DMPlexDistributeOverlap_Internal(dm, overlap, PetscObjectComm((PetscObject)dm), NULL, sf, dmOverlap));
2042   PetscFunctionReturn(PETSC_SUCCESS);
2043 }
2044 
2045 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2046 {
2047   DM_Plex *mesh = (DM_Plex *)dm->data;
2048 
2049   PetscFunctionBegin;
2050   *overlap = mesh->overlap;
2051   PetscFunctionReturn(PETSC_SUCCESS);
2052 }
2053 
2054 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2055 {
2056   DM_Plex *mesh    = NULL;
2057   DM_Plex *meshSrc = NULL;
2058 
2059   PetscFunctionBegin;
2060   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2061   mesh = (DM_Plex *)dm->data;
2062   if (dmSrc) {
2063     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2064     meshSrc       = (DM_Plex *)dmSrc->data;
2065     mesh->overlap = meshSrc->overlap;
2066   } else {
2067     mesh->overlap = 0;
2068   }
2069   mesh->overlap += overlap;
2070   PetscFunctionReturn(PETSC_SUCCESS);
2071 }
2072 
2073 /*@
2074   DMPlexGetOverlap - Get the width of the cell overlap
2075 
2076   Not Collective
2077 
2078   Input Parameter:
2079 . dm - The `DM`
2080 
2081   Output Parameter:
2082 . overlap - the width of the cell overlap
2083 
2084   Level: intermediate
2085 
2086 .seealso: `DMPLEX`, `DMPlexSetOverlap()`, `DMPlexDistribute()`
2087 @*/
2088 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2089 {
2090   PetscFunctionBegin;
2091   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2092   PetscAssertPointer(overlap, 2);
2093   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2094   PetscFunctionReturn(PETSC_SUCCESS);
2095 }
2096 
2097 /*@
2098   DMPlexSetOverlap - Set the width of the cell overlap
2099 
2100   Logically Collective
2101 
2102   Input Parameters:
2103 + dm      - The `DM`
2104 . dmSrc   - The `DM` that produced this one, or `NULL`
2105 - overlap - the width of the cell overlap
2106 
2107   Level: intermediate
2108 
2109   Note:
2110   The overlap from `dmSrc` is added to `dm`
2111 
2112 .seealso: `DMPLEX`, `DMPlexGetOverlap()`, `DMPlexDistribute()`
2113 @*/
2114 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2115 {
2116   PetscFunctionBegin;
2117   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2118   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2119   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2120   PetscFunctionReturn(PETSC_SUCCESS);
2121 }
2122 
2123 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2124 {
2125   DM_Plex *mesh = (DM_Plex *)dm->data;
2126 
2127   PetscFunctionBegin;
2128   mesh->distDefault = dist;
2129   PetscFunctionReturn(PETSC_SUCCESS);
2130 }
2131 
/*@
  DMPlexDistributeSetDefault - Set flag indicating whether the `DM` should be distributed by default

  Logically Collective

  Input Parameters:
+ dm   - The `DM`
- dist - Flag for distribution

  Level: intermediate

.seealso: `DMPLEX`, `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveBool(dm, dist, 2);
  /* PetscTryMethod() silently does nothing for DM types that do not compose this method */
  PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2153 
2154 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2155 {
2156   DM_Plex *mesh = (DM_Plex *)dm->data;
2157 
2158   PetscFunctionBegin;
2159   *dist = mesh->distDefault;
2160   PetscFunctionReturn(PETSC_SUCCESS);
2161 }
2162 
/*@
  DMPlexDistributeGetDefault - Get flag indicating whether the `DM` should be distributed by default

  Not Collective

  Input Parameter:
. dm - The `DM`

  Output Parameter:
. dist - Flag for distribution

  Level: intermediate

.seealso: `DMPLEX`, `DM`, `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(dist, 2);
  /* Dispatch to the type-specific implementation; PetscUseMethod() errors if the method is not composed */
  PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2186 
/*@C
  DMPlexGetGatherDM - Get a copy of the `DMPLEX` that gathers all points on the
  root process of the original's communicator.

  Collective

  Input Parameter:
. dm - the original `DMPLEX` object

  Output Parameters:
+ sf         - the `PetscSF` used for point distribution (optional)
- gatherMesh - the gathered `DM` object, or `NULL`

  Level: intermediate

.seealso: `DMPLEX`, `DM`, `PetscSF`, `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
@*/
PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
{
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscPartitioner oldPart, gatherPart;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(gatherMesh, 3);
  *gatherMesh = NULL;
  if (sf) *sf = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to gather on a single process; outputs remain NULL */
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  /* Keep a reference to the current partitioner so it can be restored afterwards */
  PetscCall(DMPlexGetPartitioner(dm, &oldPart));
  PetscCall(PetscObjectReference((PetscObject)oldPart));
  /* Temporarily install a GATHER partitioner, which sends every point to the root rank */
  PetscCall(PetscPartitionerCreate(comm, &gatherPart));
  PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
  PetscCall(DMPlexSetPartitioner(dm, gatherPart));
  PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));

  /* Restore the caller's partitioner and drop our references */
  PetscCall(DMPlexSetPartitioner(dm, oldPart));
  PetscCall(PetscPartitionerDestroy(&gatherPart));
  PetscCall(PetscPartitionerDestroy(&oldPart));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2230 
/*@C
  DMPlexGetRedundantDM - Get a copy of the `DMPLEX` that is completely copied on each process.

  Collective

  Input Parameter:
. dm - the original `DMPLEX` object

  Output Parameters:
+ sf            - the `PetscSF` used for point distribution (optional)
- redundantMesh - the redundant `DM` object, or `NULL`

  Level: intermediate

.seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetGatherDM()`
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscInt     pStart, pEnd, p;
  PetscInt     numPoints = -1;
  PetscSF      migrationSF, sfPoint, gatherSF;
  DM           gatherDM, dmCoord;
  PetscSFNode *points;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(redundantMesh, 3);
  *redundantMesh = NULL;
  comm           = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) {
    /* Serial mesh is already redundant: hand back a new reference to dm itself */
    PetscCall(PetscObjectReference((PetscObject)dm));
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* First collect the whole mesh on the root process */
  PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
  if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
  /* Broadcast the gathered point count from rank 0, which holds the full mesh */
  numPoints = pEnd - pStart;
  PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
  PetscCall(PetscMalloc1(numPoints, &points));
  PetscCall(PetscSFCreate(comm, &migrationSF));
  /* Every rank makes a leaf for each root point on rank 0, i.e. a broadcast SF */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
  PetscCall(DMPlexCreate(comm, redundantMesh));
  PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
  PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
  /* This is to express that all points are in the overlap */
  PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
  PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
  PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  if (sf) {
    PetscSF tsf;

    /* Compose the gather SF with the broadcast SF to map original points to redundant points */
    PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
    PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
    PetscCall(PetscSFDestroy(&tsf));
  }
  PetscCall(PetscSFDestroy(&migrationSF));
  PetscCall(PetscSFDestroy(&gatherSF));
  PetscCall(DMDestroy(&gatherDM));
  PetscCall(DMCopyDisc(dm, *redundantMesh));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2306 
2307 /*@
2308   DMPlexIsDistributed - Find out whether this `DM` is distributed, i.e. more than one rank owns some points.
2309 
2310   Collective
2311 
2312   Input Parameter:
2313 . dm - The `DM` object
2314 
2315   Output Parameter:
2316 . distributed - Flag whether the `DM` is distributed
2317 
2318   Level: intermediate
2319 
2320   Notes:
2321   This currently finds out whether at least two ranks have any DAG points.
2322   This involves `MPI_Allreduce()` with one integer.
2323   The result is currently not stashed so every call to this routine involves this global communication.
2324 
2325 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2326 @*/
2327 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2328 {
2329   PetscInt    pStart, pEnd, count;
2330   MPI_Comm    comm;
2331   PetscMPIInt size;
2332 
2333   PetscFunctionBegin;
2334   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2335   PetscAssertPointer(distributed, 2);
2336   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2337   PetscCallMPI(MPI_Comm_size(comm, &size));
2338   if (size == 1) {
2339     *distributed = PETSC_FALSE;
2340     PetscFunctionReturn(PETSC_SUCCESS);
2341   }
2342   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2343   count = (pEnd - pStart) > 0 ? 1 : 0;
2344   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2345   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2346   PetscFunctionReturn(PETSC_SUCCESS);
2347 }
2348 
2349 /*@
2350   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2351 
2352   Input Parameters:
2353 + dm   - The `DM`
2354 - name - The name of the specific parallel distribution
2355 
2356   Level: developer
2357 
2358   Note:
2359   If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2360   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2361   this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2362   loads the parallel distribution stored in file under this name.
2363 
2364 .seealso: `DMPLEX`, `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2365 @*/
2366 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2367 {
2368   DM_Plex *mesh = (DM_Plex *)dm->data;
2369 
2370   PetscFunctionBegin;
2371   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2372   if (name) PetscAssertPointer(name, 2);
2373   PetscCall(PetscFree(mesh->distributionName));
2374   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2375   PetscFunctionReturn(PETSC_SUCCESS);
2376 }
2377 
2378 /*@
2379   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2380 
2381   Input Parameter:
2382 . dm - The `DM`
2383 
2384   Output Parameter:
2385 . name - The name of the specific parallel distribution
2386 
2387   Level: developer
2388 
2389   Note:
2390   If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2391   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2392   this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2393   loads the parallel distribution stored in file under this name.
2394 
2395 .seealso: `DMPLEX`, `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2396 @*/
2397 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2398 {
2399   DM_Plex *mesh = (DM_Plex *)dm->data;
2400 
2401   PetscFunctionBegin;
2402   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2403   PetscAssertPointer(name, 2);
2404   *name = mesh->distributionName;
2405   PetscFunctionReturn(PETSC_SUCCESS);
2406 }
2407