xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 49664dce07a55e12bbee01dbb7f26988e081dbef)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c-1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s-1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt      *star = NULL;
157   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize*2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
166     for (c = 0; c < closureSize*2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(0);
177 }
178 
/* Dispatch adjacency computation for point p, allocating *adj if needed and optionally
   rewriting constrained (anchored) points into their anchors.
   Input:  useCone/useTransitiveClosure select the adjacency variant; useAnchors enables
           the anchor substitution pass.
   In/out: *adjSize is the capacity on input (or negative for "use the computed bound"),
           the number of adjacent points on output; *adj is allocated here when NULL.
   NOTE(review): asiz is a function-static cache of the last computed allocation bound.
   If a caller passes a preallocated *adj together with *adjSize < 0, the value used is
   whatever a PREVIOUS call computed (or 0) — looks fragile and not thread-safe; confirm
   all callers either pass NULL *adj or a non-negative *adjSize. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* Anchor data: aSec maps constrained points to their anchor lists in aIS */
    PetscCall(DMPlexGetAnchors(dm,&aSec,&aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec,&maxAnchors));
      maxAnchors = PetscMax(1,maxAnchors);
      PetscCall(PetscSectionGetChart(aSec,&aStart,&aEnd));
      PetscCall(ISGetIndices(aIS,&anchors));
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Upper-bound the adjacency size with geometric series in the max cone/support sizes,
       capped by the total number of points in the chart */
    PetscCall(DMPlexGetChart(dm, &pStart,&pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    PetscCall(PetscMalloc1(asiz,adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback overrides every built-in adjacency variant */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* In-place rewrite: remove each anchored point from the list and append its anchors
       (deduplicated).  i is only advanced when orig[i] is kept, because removal shifts
       the remaining entries down onto position i. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];  /* shadows the parameter p intentionally-looking; scope is local */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        PetscCall(PetscSectionGetDof(aSec,p,&aDof));
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Delete orig[i] by shifting the tail left one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec,p,&aOff));
        for (s = 0; s < aDof; ++s) {
          /* Append anchors[aOff+s] unless already present; the loop condition's comma
             expression performs the append exactly when the search runs off the end */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS,&anchors));
  }
  PetscFunctionReturn(0);
}
264 
265 /*@
266   DMPlexGetAdjacency - Return all points adjacent to the given point
267 
268   Input Parameters:
269 + dm - The DM object
270 - p  - The point
271 
272   Input/Output Parameters:
273 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
274             on output the number of adjacent points
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
276         on output contains the adjacent points
277 
278   Level: advanced
279 
280   Notes:
281     The user must PetscFree the adj array if it was not passed in.
282 
283 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
284 @*/
285 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
286 {
287   PetscBool      useCone, useClosure, useAnchors;
288 
289   PetscFunctionBeginHot;
290   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
291   PetscValidIntPointer(adjSize,3);
292   PetscValidPointer(adj,4);
293   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
294   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
295   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
296   PetscFunctionReturn(0);
297 }
298 
299 /*@
300   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
301 
302   Collective on dm
303 
304   Input Parameters:
305 + dm      - The DM
306 . sfPoint - The PetscSF which encodes point connectivity
307 . rootRankSection -
308 . rootRanks -
309 . leftRankSection -
310 - leafRanks -
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330 
331   PetscFunctionBegin;
332   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
333   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
334   if (processRanks) {PetscValidPointer(processRanks, 7);}
335   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
336   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
337   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
338   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
339   PetscCall(PetscBTCreate(size, &neighbors));
340   PetscCall(PetscBTMemzero(size, neighbors));
341   /* Compute root-to-leaf process connectivity */
342   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
343   PetscCall(ISGetIndices(rootRanks, &nranks));
344   for (p = pStart; p < pEnd; ++p) {
345     PetscInt ndof, noff, n;
346 
347     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
348     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
349     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
350   }
351   PetscCall(ISRestoreIndices(rootRanks, &nranks));
352   /* Compute leaf-to-neighbor process connectivity */
353   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
354   PetscCall(ISGetIndices(leafRanks, &nranks));
355   for (p = pStart; p < pEnd; ++p) {
356     PetscInt ndof, noff, n;
357 
358     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
359     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
360     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
361   }
362   PetscCall(ISRestoreIndices(leafRanks, &nranks));
363   /* Compute leaf-to-root process connectivity */
364   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
365   /* Calculate edges */
366   PetscBTClear(neighbors, rank);
367   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
368   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
369   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
370   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
371   for (proc = 0, n = 0; proc < size; ++proc) {
372     if (PetscBTLookup(neighbors, proc)) {
373       ranksNew[n]              = proc;
374       localPointsNew[n]        = proc;
375       remotePointsNew[n].index = rank;
376       remotePointsNew[n].rank  = proc;
377       ++n;
378     }
379   }
380   PetscCall(PetscBTDestroy(&neighbors));
381   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
382   else              PetscCall(PetscFree(ranksNew));
383   if (sfProcess) {
384     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
385     PetscCall(PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF"));
386     PetscCall(PetscSFSetFromOptions(*sfProcess));
387     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
388   }
389   PetscFunctionReturn(0);
390 }
391 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject) rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  /* rootdegree[p - pStart] = number of leaves attached to root p across all processes */
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root: every process contributes its own rank for every
     leaf it holds, and the gather packs them per-root into remoterank */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd-pStart, &myrank));
  PetscCall(PetscMalloc1(nedges,  &remoterank));
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* remoterank ownership passes to the IS (PETSC_OWN_POINTER) */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves, producing the leaf-side view of the same edges */
  PetscCall(PetscObjectSetName((PetscObject) leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(0);
}
445 
#if 0
/* Copy the overlap-label configuration (ovLabels/ovValues and the exclusion label/value)
   from dm to ndm, matching labels by name on the new DM.
   NOTE(review): this function is compiled out with #if 0 — presumably kept for a planned
   feature or as reference; confirm before deleting or re-enabling. */
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      /* Look up each overlap label on the new DM by its name on the old DM */
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(0);
}
#endif
470 
471 /*@C
472   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
473 
474   Collective on dm
475 
476   Input Parameters:
477 + dm          - The DM
478 . levels      - Number of overlap levels
479 . rootSection - The number of leaves for a given root point
480 . rootrank    - The rank of each edge into the root point
481 . leafSection - The number of processes sharing a given leaf point
482 - leafrank    - The rank of each process sharing a leaf point
483 
484   Output Parameter:
485 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
486 
487   Level: developer
488 
489 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
490 @*/
491 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
492 {
493   MPI_Comm           comm;
494   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
495   PetscSF            sfPoint;
496   const PetscSFNode *remote;
497   const PetscInt    *local;
498   const PetscInt    *nrank, *rrank;
499   PetscInt          *adj = NULL;
500   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
501   PetscMPIInt        rank, size;
502   PetscBool          flg;
503 
504   PetscFunctionBegin;
505   *ovLabel = NULL;
506   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
507   PetscCallMPI(MPI_Comm_size(comm, &size));
508   PetscCallMPI(MPI_Comm_rank(comm, &rank));
509   if (size == 1) PetscFunctionReturn(0);
510   PetscCall(DMGetPointSF(dm, &sfPoint));
511   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
512   if (!levels) {
513     /* Add owned points */
514     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
515     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
516     PetscFunctionReturn(0);
517   }
518   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
519   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
520   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
521   /* Handle leaves: shared with the root point */
522   for (l = 0; l < nleaves; ++l) {
523     PetscInt adjSize = PETSC_DETERMINE, a;
524 
525     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
526     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
527   }
528   PetscCall(ISGetIndices(rootrank, &rrank));
529   PetscCall(ISGetIndices(leafrank, &nrank));
530   /* Handle roots */
531   for (p = pStart; p < pEnd; ++p) {
532     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
533 
534     if ((p >= sStart) && (p < sEnd)) {
535       /* Some leaves share a root with other leaves on different processes */
536       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
537       if (neighbors) {
538         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
539         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
540         for (n = 0; n < neighbors; ++n) {
541           const PetscInt remoteRank = nrank[noff+n];
542 
543           if (remoteRank == rank) continue;
544           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
545         }
546       }
547     }
548     /* Roots are shared with leaves */
549     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
550     if (!neighbors) continue;
551     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
552     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
553     for (n = 0; n < neighbors; ++n) {
554       const PetscInt remoteRank = rrank[noff+n];
555 
556       if (remoteRank == rank) continue;
557       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
558     }
559   }
560   PetscCall(PetscFree(adj));
561   PetscCall(ISRestoreIndices(rootrank, &rrank));
562   PetscCall(ISRestoreIndices(leafrank, &nrank));
563   /* Add additional overlap levels */
564   for (l = 1; l < levels; l++) {
565     /* Propagate point donations over SF to capture remote connections */
566     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
567     /* Add next level of point donations to the label */
568     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
569   }
570   /* We require the closure in the overlap */
571   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
572   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
573   if (flg) {
574     PetscViewer viewer;
575     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
576     PetscCall(DMLabelView(ovAdjByRank, viewer));
577   }
578   /* Invert sender to receiver label */
579   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
580   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
581   /* Add owned points, except for shared local points */
582   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
583   for (l = 0; l < nleaves; ++l) {
584     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
585     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
586   }
587   /* Clean up */
588   PetscCall(DMLabelDestroy(&ovAdjByRank));
589   PetscFunctionReturn(0);
590 }
591 
592 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
593 {
594   PetscInt neighbors, el;
595 
596   PetscFunctionBegin;
597   PetscCall(PetscSectionGetDof(section, p, &neighbors));
598   if (neighbors) {
599     PetscInt   *adj = NULL;
600     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
601     PetscMPIInt rank;
602 
603     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
604     PetscCall(PetscSectionGetOffset(section, p, &noff));
605     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
606     for (n = 0; n < neighbors; ++n) {
607       const PetscInt remoteRank = ranks[noff+n];
608 
609       if (remoteRank == rank) continue;
610       for (a = 0; a < adjSize; ++a) {
611         PetscBool insert = PETSC_TRUE;
612 
613         for (el = 0; el < numExLabels; ++el) {
614           PetscInt exVal;
615           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
616           if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
617         }
618         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
619       }
620     }
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 /*@C
626   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
627 
628   Collective on dm
629 
630   Input Parameters:
631 + dm          - The DM
632 . numLabels   - The number of labels to draw candidate points from
633 . label       - An array of labels containing candidate points
634 . value       - An array of label values marking the candidate points
635 . numExLabels - The number of labels to use for exclusion
636 . exLabel     - An array of labels indicating points to be excluded, or NULL
637 . exValue     - An array of label values to be excluded, or NULL
638 . rootSection - The number of leaves for a given root point
639 . rootrank    - The rank of each edge into the root point
640 . leafSection - The number of processes sharing a given leaf point
641 - leafrank    - The rank of each process sharing a leaf point
642 
643   Output Parameter:
644 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
645 
646   Note:
647   The candidate points are only added to the overlap if they are adjacent to a shared point
648 
649   Level: developer
650 
651 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
652 @*/
653 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[],
654   PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
655 {
656   MPI_Comm           comm;
657   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
658   PetscSF            sfPoint;
659   const PetscSFNode *remote;
660   const PetscInt    *local;
661   const PetscInt    *nrank, *rrank;
662   PetscInt          *adj = NULL;
663   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
664   PetscMPIInt        rank, size;
665   PetscBool          flg;
666 
667   PetscFunctionBegin;
668   *ovLabel = NULL;
669   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
670   PetscCallMPI(MPI_Comm_size(comm, &size));
671   PetscCallMPI(MPI_Comm_rank(comm, &rank));
672   if (size == 1) PetscFunctionReturn(0);
673   PetscCall(DMGetPointSF(dm, &sfPoint));
674   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
675   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
676   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
677   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
678   PetscCall(ISGetIndices(rootrank, &rrank));
679   PetscCall(ISGetIndices(leafrank, &nrank));
680   for (l = 0; l < numLabels; ++l) {
681     IS              valIS;
682     const PetscInt *points;
683     PetscInt        n;
684 
685     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
686     if (!valIS) continue;
687     PetscCall(ISGetIndices(valIS, &points));
688     PetscCall(ISGetLocalSize(valIS, &n));
689     for (PetscInt i = 0; i < n; ++i) {
690       const PetscInt p = points[i];
691 
692       if ((p >= sStart) && (p < sEnd)) {
693         PetscInt loc, adjSize = PETSC_DETERMINE;
694 
695         /* Handle leaves: shared with the root point */
696         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
697         else       loc = (p >= 0 && p < nleaves) ? p : -1;
698         if (loc >= 0) {
699           const PetscInt remoteRank = remote[loc].rank;
700 
701           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
702           for (PetscInt a = 0; a < adjSize; ++a) {
703             PetscBool insert = PETSC_TRUE;
704 
705             for (el = 0; el < numExLabels; ++el) {
706               PetscInt exVal;
707               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
708               if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
709             }
710             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
711           }
712         }
713         /* Some leaves share a root with other leaves on different processes */
714         HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank);
715       }
716       /* Roots are shared with leaves */
717       HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank);
718     }
719     PetscCall(ISRestoreIndices(valIS, &points));
720     PetscCall(ISDestroy(&valIS));
721   }
722   PetscCall(PetscFree(adj));
723   PetscCall(ISRestoreIndices(rootrank, &rrank));
724   PetscCall(ISRestoreIndices(leafrank, &nrank));
725   /* We require the closure in the overlap */
726   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
727   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
728   if (flg) {
729     PetscViewer viewer;
730     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
731     PetscCall(DMLabelView(ovAdjByRank, viewer));
732   }
733   /* Invert sender to receiver label */
734   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
735   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
736   /* Add owned points, except for shared local points */
737   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
738   for (l = 0; l < nleaves; ++l) {
739     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
740     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
741   }
742   /* Clean up */
743   PetscCall(DMLabelDestroy(&ovAdjByRank));
744   PetscFunctionReturn(0);
745 }
746 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  /* Tag every local point with its depth, then pull each incoming overlap
     point's depth from its owning rank (unmatched leaves stay -1) */
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx));
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Received points are appended per stratum in the order cells (depth dim),
     vertices (0), faces (dim-1), edges (1), matching Plex's numbering
     convention; NOTE(review): this shift arithmetic assumes dim <= 3 */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  /* depthIdx[d] is the next free slot (in the new numbering) for an incoming point of depth d */
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF describing the full overlap migration: every original local
     point plus all points received through the overlap SF */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Overwrite the self-mapping with the true remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths,remoteDepths));

  /* ilocal and iremote are handed off to the SF (PETSC_OWN_POINTER); do not free them here */
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(0);
}
856 
857 /*@
858   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
859 
860   Input Parameters:
861 + dm          - The DM
862 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
863 
864   Output Parameter:
865 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
866 
867   Note:
868   This lexicographically sorts by (depth, cellType)
869 
870   Level: developer
871 
872 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
873 @*/
874 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
875 {
876   MPI_Comm           comm;
877   PetscMPIInt        rank, size;
878   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
879   PetscSFNode       *pointDepths, *remoteDepths;
880   PetscInt          *ilocal;
881   PetscInt          *depthRecv, *depthShift, *depthIdx;
882   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
883   const PetscSFNode *iremote;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
887   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
888   PetscCallMPI(MPI_Comm_rank(comm, &rank));
889   PetscCallMPI(MPI_Comm_size(comm, &size));
890   PetscCall(DMPlexGetDepth(dm, &ldepth));
891   PetscCall(DMGetDimension(dm, &dim));
892   PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
893   PetscCheck(!(ldepth >= 0) || !(depth != ldepth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
894   PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0));
895 
896   /* Before building the migration SF we need to know the new stratum offsets */
897   PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
898   PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
899   for (d = 0; d < depth+1; ++d) {
900     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
901     for (p = pStart; p < pEnd; ++p) {
902       DMPolytopeType ct;
903 
904       PetscCall(DMPlexGetCellType(dm, p, &ct));
905       pointDepths[p].index = d;
906       pointDepths[p].rank  = ct;
907     }
908   }
909   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
910   PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
911   PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
912   /* Count received points in each stratum and compute the internal strata shift */
913   PetscCall(PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
914   for (p = 0; p < nleaves; ++p) {
915     if (remoteDepths[p].rank < 0) {
916       ++depthRecv[remoteDepths[p].index];
917     } else {
918       ++ctRecv[remoteDepths[p].rank];
919     }
920   }
921   {
922     PetscInt depths[4], dims[4], shift = 0, i, c;
923 
924     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
925          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
926      */
927     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
928     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
929     for (i = 0; i <= depth; ++i) {
930       const PetscInt dep = depths[i];
931       const PetscInt dim = dims[i];
932 
933       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
934         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
935         ctShift[c] = shift;
936         shift     += ctRecv[c];
937       }
938       depthShift[dep] = shift;
939       shift          += depthRecv[dep];
940     }
941     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
942       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
943 
944       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
945         ctShift[c] = shift;
946         shift     += ctRecv[c];
947       }
948     }
949   }
950   /* Derive a new local permutation based on stratified indices */
951   PetscCall(PetscMalloc1(nleaves, &ilocal));
952   for (p = 0; p < nleaves; ++p) {
953     const PetscInt       dep = remoteDepths[p].index;
954     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
955 
956     if ((PetscInt) ct < 0) {
957       ilocal[p] = depthShift[dep] + depthIdx[dep];
958       ++depthIdx[dep];
959     } else {
960       ilocal[p] = ctShift[ct] + ctIdx[ct];
961       ++ctIdx[ct];
962     }
963   }
964   PetscCall(PetscSFCreate(comm, migrationSF));
965   PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Migration SF"));
966   PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode*)iremote, PETSC_COPY_VALUES));
967   PetscCall(PetscFree2(pointDepths,remoteDepths));
968   PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
969   PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0));
970   PetscFunctionReturn(0);
971 }
972 
973 /*@
974   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
975 
976   Collective on dm
977 
978   Input Parameters:
979 + dm - The DMPlex object
980 . pointSF - The PetscSF describing the communication pattern
981 . originalSection - The PetscSection for existing data layout
982 - originalVec - The existing data in a local vector
983 
984   Output Parameters:
985 + newSection - The PetscSF describing the new data layout
986 - newVec - The new data in a local vector
987 
988   Level: developer
989 
990 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
991 @*/
992 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
993 {
994   PetscSF        fieldSF;
995   PetscInt      *remoteOffsets, fieldSize;
996   PetscScalar   *originalValues, *newValues;
997 
998   PetscFunctionBegin;
999   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1000   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1001 
1002   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1003   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1004   PetscCall(VecSetType(newVec,dm->vectype));
1005 
1006   PetscCall(VecGetArray(originalVec, &originalValues));
1007   PetscCall(VecGetArray(newVec, &newValues));
1008   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1009   PetscCall(PetscFree(remoteOffsets));
1010   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1011   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1012   PetscCall(PetscSFDestroy(&fieldSF));
1013   PetscCall(VecRestoreArray(newVec, &newValues));
1014   PetscCall(VecRestoreArray(originalVec, &originalValues));
1015   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /*@
1020   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1021 
1022   Collective on dm
1023 
1024   Input Parameters:
1025 + dm - The DMPlex object
1026 . pointSF - The PetscSF describing the communication pattern
1027 . originalSection - The PetscSection for existing data layout
1028 - originalIS - The existing data
1029 
1030   Output Parameters:
1031 + newSection - The PetscSF describing the new data layout
1032 - newIS - The new data
1033 
1034   Level: developer
1035 
1036 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1037 @*/
1038 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1039 {
1040   PetscSF         fieldSF;
1041   PetscInt       *newValues, *remoteOffsets, fieldSize;
1042   const PetscInt *originalValues;
1043 
1044   PetscFunctionBegin;
1045   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1046   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1047 
1048   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1049   PetscCall(PetscMalloc1(fieldSize, &newValues));
1050 
1051   PetscCall(ISGetIndices(originalIS, &originalValues));
1052   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1053   PetscCall(PetscFree(remoteOffsets));
1054   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1055   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1056   PetscCall(PetscSFDestroy(&fieldSF));
1057   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1058   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1059   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1060   PetscFunctionReturn(0);
1061 }
1062 
1063 /*@
1064   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1065 
1066   Collective on dm
1067 
1068   Input Parameters:
1069 + dm - The DMPlex object
1070 . pointSF - The PetscSF describing the communication pattern
1071 . originalSection - The PetscSection for existing data layout
1072 . datatype - The type of data
1073 - originalData - The existing data
1074 
1075   Output Parameters:
1076 + newSection - The PetscSection describing the new data layout
1077 - newData - The new data
1078 
1079   Level: developer
1080 
1081 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1082 @*/
1083 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1084 {
1085   PetscSF        fieldSF;
1086   PetscInt      *remoteOffsets, fieldSize;
1087   PetscMPIInt    dataSize;
1088 
1089   PetscFunctionBegin;
1090   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0));
1091   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1092 
1093   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1094   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1095   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1096 
1097   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1098   PetscCall(PetscFree(remoteOffsets));
1099   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1100   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1101   PetscCall(PetscSFDestroy(&fieldSF));
1102   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0));
1103   PetscFunctionReturn(0);
1104 }
1105 
/* DMPlexDistributeCones - Distribute the cone section, cone points, and cone
   orientations of dm to dmParallel along migrationSF, renumbering the cone
   points into the new local numbering, then symmetrize and stratify the
   parallel mesh.

   original    - optional local-to-global mapping for the source points (may be NULL)
   renumbering - global-to-local mapping for points on the target mesh */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate local cone points to global numbers before sending */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection,&numCones));
    PetscCall(PetscMalloc1(numCones,&globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* No mapping given: cone points are already in the numbering to be sent */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  if (original) {
    PetscCall(PetscFree(globCones));
  }
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points to the new local numbering in place;
     IS_GTOLM_MASK leaves unmatched points negative, caught below in debug mode */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank,p));}
    }
    PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations are plain integers and need no renumbering */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Copy the adjacency configuration to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(0);
}
1187 
1188 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1189 {
1190   MPI_Comm         comm;
1191   DM               cdm, cdmParallel;
1192   PetscSection     originalCoordSection, newCoordSection;
1193   Vec              originalCoordinates, newCoordinates;
1194   PetscInt         bs;
1195   PetscBool        isper;
1196   const char      *name;
1197   const PetscReal *maxCell, *L;
1198   const DMBoundaryType *bd;
1199 
1200   PetscFunctionBegin;
1201   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1202   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1203 
1204   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1205   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1206   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1207   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1208   if (originalCoordinates) {
1209     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1210     PetscCall(PetscObjectGetName((PetscObject) originalCoordinates, &name));
1211     PetscCall(PetscObjectSetName((PetscObject) newCoordinates, name));
1212 
1213     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1214     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1215     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1216     PetscCall(VecSetBlockSize(newCoordinates, bs));
1217     PetscCall(VecDestroy(&newCoordinates));
1218   }
1219   PetscCall(DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd));
1220   PetscCall(DMSetPeriodicity(dmParallel, isper, maxCell, L, bd));
1221   PetscCall(DMGetCoordinateDM(dm, &cdm));
1222   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1223   PetscCall(DMCopyDisc(cdm, cdmParallel));
1224   PetscFunctionReturn(0);
1225 }
1226 
/* DMPlexDistributeLabels - Distribute the labels of dm onto dmParallel along migrationSF.

   The "depth" label is normally recreated by stratification and is skipped,
   unless some rank detects that the user has modified it (via its object
   state), in which case it is communicated instead and missing strata are
   filled in afterwards. All ranks must make the same sequence of collective
   calls, so label metadata is broadcast from rank 0. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject) depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  /* Any single rank detecting a modified depth label forces everyone to send it */
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Remove the automatically created depth label so the distributed one can replace it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject) label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Rank 0 always has labels (numLabels was taken from it), so its isDepth
       value is valid and overwrites the possibly-uninitialized local one */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject) labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscFunctionReturn(0);
}
1298 
/* DMPlexDistributeSetupTree - Distribute the reference tree and the
   parent/child constraint relations of dm to dmParallel along migrationSF,
   renumbering the parent points into the new local numbering (mirroring the
   scheme used for cones in DMPlexDistributeCones). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm,&refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel,refTree));
  PetscCall(DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL));
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection,pStart,pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection,&newParentSize));
    PetscCall(PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs));
    if (original) {
      /* Translate local parent points to global numbers before sending */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection,&numParents));
      PetscCall(PetscMalloc1(numParents,&globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    }
    else {
      /* No mapping given: parent points are already in the numbering to be sent */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    if (original) {
      PetscCall(PetscFree(globParents));
    }
    /* Child IDs are plain integers and need no renumbering */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    /* Map received global parents back to the new local numbering; unmatched
       points become negative (caught below in debug mode) */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));}
      }
      PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents,newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1374 
/* DMPlexDistributeSF - Build the point SF of dmParallel from migrationSF.

   Ownership of each migrated point is resolved with a two-phase MAXLOC
   reduction over the migration SF: each receiving rank bids for points it may
   own, the reduction picks a single owner per root, and the result is
   broadcast back so every rank learns the owner of each of its points. Points
   owned by another rank become ghost points (leaves) of the new point SF. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots,&rowners,numLeaves,&lowners));
    /* Phase 0: broadcast (-1,-1) to all leaves so every rank starts from "unclaimed" */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, flag so that MAXLOC does not use them */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Phase 1: MAXLOC picks one owner per root, then the result is broadcast back to all leaves */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    /* Phase 2: any point owned elsewhere becomes a ghost point of the new point SF */
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners,lowners));
    /* ghostPoints and remotePoints are handed off to the SF (PETSC_OWN_POINTER) */
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  /* Copy the adjacency configuration to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0));
  PetscFunctionReturn(0);
}
1450 
1451 /*@
1452   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1453 
1454   Input Parameters:
1455 + dm - The DMPlex object
1456 - flg - Balance the partition?
1457 
1458   Level: intermediate
1459 
1460 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1461 @*/
1462 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1463 {
1464   DM_Plex *mesh = (DM_Plex *)dm->data;
1465 
1466   PetscFunctionBegin;
1467   mesh->partitionBalance = flg;
1468   PetscFunctionReturn(0);
1469 }
1470 
1471 /*@
1472   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1473 
1474   Input Parameter:
1475 . dm - The DMPlex object
1476 
1477   Output Parameter:
1478 . flg - Balance the partition?
1479 
1480   Level: intermediate
1481 
1482 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1483 @*/
1484 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1485 {
1486   DM_Plex *mesh = (DM_Plex *)dm->data;
1487 
1488   PetscFunctionBegin;
1489   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1490   PetscValidBoolPointer(flg, 2);
1491   *flg = mesh->partitionBalance;
1492   PetscFunctionReturn(0);
1493 }
1494 
/* Ballot entry for the shared-point ownership election in DMPlexCreatePointSF():
   'vote' is compared MaxLoc-style and the (rank, index) payload travels with the winner */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1498 
1499 /* MaxLoc, but carry a third piece of information around */
1500 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1501 {
1502   Petsc3Int *a = (Petsc3Int *)inout_;
1503   Petsc3Int *b = (Petsc3Int *)in_;
1504   PetscInt i, len = *len_;
1505   for (i = 0; i < len; i++) {
1506     if (a[i].vote < b[i].vote) {
1507       a[i].vote = b[i].vote;
1508       a[i].rank = b[i].rank;
1509       a[i].index = b[i].index;
1510     } else if (a[i].vote <= b[i].vote) {
1511       if (a[i].rank >= b[i].rank) {
1512         a[i].rank = b[i].rank;
1513         a[i].index = b[i].index;
1514       }
1515     }
1516   }
1517 }
1518 
1519 /*@C
1520   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1521 
1522   Input Parameters:
1523 + dm          - The source DMPlex object
1524 . migrationSF - The star forest that describes the parallel point remapping
1525 - ownership   - Flag causing a vote to determine point ownership
1526 
1527   Output Parameter:
1528 . pointSF     - The star forest describing the point overlap in the remapped DM
1529 
1530   Notes:
1531   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1532 
1533   Level: developer
1534 
1535 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1536 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                /* only created when ownership && balance; all uses are guarded by 'shift' */
  const PetscInt     numShifts = 13759;     /* size of the random shift table; accessed modulo this value */
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    /* Elect an owner for every shared point via a MAXLOC-with-payload reduction (MaxLocCarry) */
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467*size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce with a custom op so the (rank, index) payload rides along with the winning vote */
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: every root is owned by this rank; record the new local id of each root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Tell every leaf who owns its point */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));

  /* Leaves owned remotely become the leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  /* pointLocal/pointRemote are handed off to the SF (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF));
  PetscFunctionReturn(0);
}
1650 
1651 /*@C
1652   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1653 
1654   Collective on dm
1655 
1656   Input Parameters:
1657 + dm       - The source DMPlex object
1658 - sf       - The star forest communication context describing the migration pattern
1659 
1660   Output Parameter:
1661 . targetDM - The target DMPlex object
1662 
1663   Level: intermediate
1664 
1665 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1666 @*/
1667 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1668 {
1669   MPI_Comm               comm;
1670   PetscInt               dim, cdim, nroots;
1671   PetscSF                sfPoint;
1672   ISLocalToGlobalMapping ltogMigration;
1673   ISLocalToGlobalMapping ltogOriginal = NULL;
1674 
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1677   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1678   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
1679   PetscCall(DMGetDimension(dm, &dim));
1680   PetscCall(DMSetDimension(targetDM, dim));
1681   PetscCall(DMGetCoordinateDim(dm, &cdim));
1682   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1683 
1684   /* Check for a one-to-all distribution pattern */
1685   PetscCall(DMGetPointSF(dm, &sfPoint));
1686   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1687   if (nroots >= 0) {
1688     IS        isOriginal;
1689     PetscInt  n, size, nleaves;
1690     PetscInt  *numbering_orig, *numbering_new;
1691 
1692     /* Get the original point numbering */
1693     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1694     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1695     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1696     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1697     /* Convert to positive global numbers */
1698     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1699     /* Derive the new local-to-global mapping from the old one */
1700     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1701     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1702     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1703     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1704     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1705     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1706     PetscCall(ISDestroy(&isOriginal));
1707   } else {
1708     /* One-to-all distribution pattern: We can derive LToG from SF */
1709     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1710   }
1711   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1712   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1713   /* Migrate DM data to target DM */
1714   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1715   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1716   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1717   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1718   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1719   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1720   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1721   PetscFunctionReturn(0);
1722 }
1723 
1724 /*@C
1725   DMPlexDistribute - Distributes the mesh and any associated sections.
1726 
1727   Collective on dm
1728 
1729   Input Parameters:
1730 + dm  - The original DMPlex object
1731 - overlap - The overlap of partitions, 0 is the default
1732 
1733   Output Parameters:
1734 + sf - The PetscSF used for point distribution, or NULL if not needed
1735 - dmParallel - The distributed DMPlex object
1736 
1737   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1738 
1739   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1740   representation on the mesh.
1741 
1742   Level: intermediate
1743 
1744 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1745 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Serial run: nothing to distribute, outputs remain NULL */
  if (size == 1) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0));
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: collect the set of target ranks that receive points */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    /* poff doubles here as the insertion offset for the hash-set extraction */
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure() */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points+poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0));

  /* Invert the partition label (points per destination rank) into a migration label,
     then convert it into a stratified migration SF */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  /* The coordinate DM shares the point SF */
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    /* oldRemote aliases the permuted copy at this point, so this frees permRemote */
    if (oldLeaves) PetscCall(PetscFree(oldRemote));
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMGetLocalSection(dm, &section));
    PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: hand the migration SF to the caller, or destroy it */
  if (sf) {*sf = sfMigration;}
  else    PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0));
  PetscFunctionReturn(0);
}
1904 
1905 /*@C
1906   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1907 
1908   Collective on dm
1909 
1910   Input Parameters:
1911 + dm  - The non-overlapping distributed DMPlex object
1912 - overlap - The overlap of partitions (the same on all ranks)
1913 
1914   Output Parameters:
1915 + sf - The PetscSF used for point distribution
1916 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1917 
1918   Notes:
1919   If the mesh was not distributed, the return value is NULL.
1920 
1921   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1922   representation on the mesh.
1923 
1924   Options Database Keys:
1925 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1926 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1927 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1928 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1929 
1930   Level: advanced
1931 
1932 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
1933 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  DM_Plex     *mesh = (DM_Plex *) dm->data;
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DM           dmCoord;
  DMLabel      lblOverlap;
  PetscSF      sfOverlap, sfStratified, sfPoint;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Serial run: no overlap to build, outputs remain NULL */
  if (size == 1) PetscFunctionReturn(0);
  {
    // We need to get options for the _already_distributed mesh, so it must be done here
    PetscInt    overlap; /* intentionally shadows the parameter: holds the DM's stored overlap during option processing */
    const char *prefix;
    char        oldPrefix[PETSC_MAX_PATH_LEN];

    oldPrefix[0] = '\0';
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject) dm, &prefix));
    PetscCall(PetscStrcpy(oldPrefix, prefix));
    /* Temporarily append "dist_" so the overlap options are read under the distributed prefix */
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject) dm, "dist_"));
    PetscCall(DMPlexGetOverlap(dm, &overlap));
    PetscObjectOptionsBegin((PetscObject) dm);
    PetscCall(DMSetFromOptions_Overlap_Plex(PetscOptionsObject, dm, &overlap));
    PetscOptionsEnd();
    /* Restore the original prefix (NULL if it was empty) */
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
  }
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  /* Compute point overlap with neighbouring processes on the distributed DM */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  /* Either build the overlap from user-specified labels, or from adjacency with the requested width */
  if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  /* Convert overlap label to stratified migration SF */
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
  PetscCall(PetscSFDestroy(&sfOverlap));
  sfOverlap = sfStratified;
  PetscCall(PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF"));
  PetscCall(PetscSFSetFromOptions(sfOverlap));

  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));

  /* Build the overlapping DM */
  PetscCall(DMPlexCreate(comm, dmOverlap));
  PetscCall(PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
  /* Store the overlap in the new DM */
  PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
  /* Build the new point SF (no ownership vote needed with overlap already in place) */
  PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Cleanup overlap partition */
  PetscCall(DMLabelDestroy(&lblOverlap));
  if (sf) *sf = sfOverlap;
  else    PetscCall(PetscSFDestroy(&sfOverlap));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
2014 
2015 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2016 {
2017   DM_Plex        *mesh  = (DM_Plex*) dm->data;
2018 
2019   PetscFunctionBegin;
2020   *overlap = mesh->overlap;
2021   PetscFunctionReturn(0);
2022 }
2023 
2024 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2025 {
2026   DM_Plex        *mesh=NULL;
2027   DM_Plex        *meshSrc=NULL;
2028 
2029   PetscFunctionBegin;
2030   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2031   mesh = (DM_Plex*) dm->data;
2032   mesh->overlap = overlap;
2033   if (dmSrc) {
2034     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2035     meshSrc = (DM_Plex*) dmSrc->data;
2036     mesh->overlap += meshSrc->overlap;
2037   }
2038   PetscFunctionReturn(0);
2039 }
2040 
2041 /*@
2042   DMPlexGetOverlap - Get the width of the cell overlap
2043 
2044   Not collective
2045 
2046   Input Parameter:
2047 . dm   - The DM
2048 
2049   Output Parameter:
2050 . overlap - the width of the cell overlap
2051 
2052   Level: intermediate
2053 
2054 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2055 @*/
2056 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2057 {
2058   PetscFunctionBegin;
2059   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2060   PetscValidIntPointer(overlap, 2);
2061   PetscUseMethod(dm, "DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));
2062   PetscFunctionReturn(0);
2063 }
2064 
2065 /*@
2066   DMPlexSetOverlap - Set the width of the cell overlap
2067 
2068   Logically collective
2069 
2070   Input Parameters:
2071 + dm      - The DM
2072 . dmSrc   - The DM that produced this one, or NULL
2073 - overlap - the width of the cell overlap
2074 
2075   Note:
2076   The overlap from dmSrc is added to dm
2077 
2078   Level: intermediate
2079 
2080 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2081 @*/
2082 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2083 {
2084   PetscFunctionBegin;
2085   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2086   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2087   PetscTryMethod(dm, "DMPlexSetOverlap_C",(DM,DM,PetscInt),(dm,dmSrc,overlap));
2088   PetscFunctionReturn(0);
2089 }
2090 
2091 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2092 {
2093   DM_Plex *mesh = (DM_Plex *) dm->data;
2094 
2095   PetscFunctionBegin;
2096   mesh->distDefault = dist;
2097   PetscFunctionReturn(0);
2098 }
2099 
2100 /*@
2101   DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default
2102 
2103   Logically collective
2104 
2105   Input Parameters:
2106 + dm   - The DM
2107 - dist - Flag for distribution
2108 
2109   Level: intermediate
2110 
2111 .seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2112 @*/
2113 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2114 {
2115   PetscFunctionBegin;
2116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2117   PetscValidLogicalCollectiveBool(dm, dist, 2);
2118   PetscTryMethod(dm,"DMPlexDistributeSetDefault_C",(DM,PetscBool),(dm,dist));
2119   PetscFunctionReturn(0);
2120 }
2121 
2122 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2123 {
2124   DM_Plex *mesh = (DM_Plex *) dm->data;
2125 
2126   PetscFunctionBegin;
2127   *dist = mesh->distDefault;
2128   PetscFunctionReturn(0);
2129 }
2130 
2131 /*@
2132   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2133 
2134   Not collective
2135 
2136   Input Parameter:
2137 . dm   - The DM
2138 
2139   Output Parameter:
2140 . dist - Flag for distribution
2141 
2142   Level: intermediate
2143 
2144 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2145 @*/
2146 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2147 {
2148   PetscFunctionBegin;
2149   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2150   PetscValidBoolPointer(dist, 2);
2151   PetscUseMethod(dm,"DMPlexDistributeGetDefault_C",(DM,PetscBool*),(dm,dist));
2152   PetscFunctionReturn(0);
2153 }
2154 
2155 /*@C
2156   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2157   root process of the original's communicator.
2158 
2159   Collective on dm
2160 
2161   Input Parameter:
2162 . dm - the original DMPlex object
2163 
2164   Output Parameters:
2165 + sf - the PetscSF used for point distribution (optional)
2166 - gatherMesh - the gathered DM object, or NULL
2167 
2168   Level: intermediate
2169 
2170 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2171 @*/
2172 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2173 {
2174   MPI_Comm       comm;
2175   PetscMPIInt    size;
2176   PetscPartitioner oldPart, gatherPart;
2177 
2178   PetscFunctionBegin;
2179   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2180   PetscValidPointer(gatherMesh,3);
2181   *gatherMesh = NULL;
2182   if (sf) *sf = NULL;
2183   comm = PetscObjectComm((PetscObject)dm);
2184   PetscCallMPI(MPI_Comm_size(comm,&size));
2185   if (size == 1) PetscFunctionReturn(0);
2186   PetscCall(DMPlexGetPartitioner(dm,&oldPart));
2187   PetscCall(PetscObjectReference((PetscObject)oldPart));
2188   PetscCall(PetscPartitionerCreate(comm,&gatherPart));
2189   PetscCall(PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER));
2190   PetscCall(DMPlexSetPartitioner(dm,gatherPart));
2191   PetscCall(DMPlexDistribute(dm,0,sf,gatherMesh));
2192 
2193   PetscCall(DMPlexSetPartitioner(dm,oldPart));
2194   PetscCall(PetscPartitionerDestroy(&gatherPart));
2195   PetscCall(PetscPartitionerDestroy(&oldPart));
2196   PetscFunctionReturn(0);
2197 }
2198 
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint, gatherSF;
  DM             gatherDM, dmCoord;
  PetscSFNode    *points;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(redundantMesh,3);
  *redundantMesh = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm,&size));
  if (size == 1) {
    /* A serial mesh is already redundant: hand back a new reference to dm itself */
    PetscCall(PetscObjectReference((PetscObject) dm));
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(0);
  }
  /* Step 1: gather the whole mesh onto rank 0 */
  PetscCall(DMPlexGetGatherDM(dm,&gatherSF,&gatherDM));
  if (!gatherDM) PetscFunctionReturn(0);
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(DMPlexGetChart(gatherDM,&pStart,&pEnd));
  numPoints = pEnd - pStart;
  /* Only rank 0 holds the gathered mesh; broadcast its point count so every
     rank can build a migration SF with the full set of leaves */
  PetscCallMPI(MPI_Bcast(&numPoints,1,MPIU_INT,0,comm));
  PetscCall(PetscMalloc1(numPoints,&points));
  PetscCall(PetscSFCreate(comm,&migrationSF));
  /* Every rank requests every point from rank 0 */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  /* nroots = local chart size of the gathered mesh (nonzero only on rank 0);
     PETSC_OWN_POINTER transfers ownership of points[] to the SF */
  PetscCall(PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER));
  /* Step 2: migrate the gathered mesh so every rank receives a full copy */
  PetscCall(DMPlexCreate(comm, redundantMesh));
  PetscCall(PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh"));
  PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
  /* This is to express that all point are in overlap */
  PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
  /* Install the point SF describing shared points; the coordinate DM must use the same one */
  PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
  PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  if (sf) {
    PetscSF tsf;

    /* Compose original->gathered with gathered->redundant to get the overall
       migration, then renumber it by stratum to match the output mesh */
    PetscCall(PetscSFCompose(gatherSF,migrationSF,&tsf));
    PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
    PetscCall(PetscSFDestroy(&tsf));
  }
  PetscCall(PetscSFDestroy(&migrationSF));
  PetscCall(PetscSFDestroy(&gatherSF));
  PetscCall(DMDestroy(&gatherDM));
  /* Carry over discretization and other Plex metadata from the original mesh */
  PetscCall(DMCopyDisc(dm, *redundantMesh));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
  PetscFunctionReturn(0);
}
2274 
2275 /*@
2276   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2277 
2278   Collective
2279 
2280   Input Parameter:
2281 . dm      - The DM object
2282 
2283   Output Parameter:
2284 . distributed - Flag whether the DM is distributed
2285 
2286   Level: intermediate
2287 
2288   Notes:
2289   This currently finds out whether at least two ranks have any DAG points.
2290   This involves MPI_Allreduce() with one integer.
2291   The result is currently not stashed so every call to this routine involves this global communication.
2292 
2293 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2294 @*/
2295 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2296 {
2297   PetscInt          pStart, pEnd, count;
2298   MPI_Comm          comm;
2299   PetscMPIInt       size;
2300 
2301   PetscFunctionBegin;
2302   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2303   PetscValidBoolPointer(distributed,2);
2304   PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
2305   PetscCallMPI(MPI_Comm_size(comm,&size));
2306   if (size == 1) { *distributed = PETSC_FALSE; PetscFunctionReturn(0); }
2307   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2308   count = (pEnd - pStart) > 0 ? 1 : 0;
2309   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2310   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2311   PetscFunctionReturn(0);
2312 }
2313