xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c-1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s-1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt      *star = NULL;
157   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize*2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
166     for (c = 0; c < closureSize*2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(0);
177 }
178 
/* Dispatch adjacency computation for point p, optionally allocating the output
   array and optionally substituting anchor points for constrained points.

   If *adj is NULL on entry, an array large enough for any point's adjacency is
   allocated (caller frees). If *adjSize < 0 on entry, it is set to the
   allocated capacity. On output *adjSize is the number of adjacent points. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static cache of the most recently computed allocation size;
     not thread-safe, and stale if a caller passes a pre-allocated *adj together
     with *adjSize < 0 -- presumably callers always pair *adj == NULL with
     *adjSize < 0; confirm against call sites */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;  /* only valid while aSec != NULL (indices taken below) */
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm,&aSec,&aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec,&maxAnchors));
      maxAnchors = PetscMax(1,maxAnchors);
      PetscCall(PetscSectionGetChart(aSec,&aStart,&aEnd));
      PetscCall(ISGetIndices(aIS,&anchors));
    }
  }
  if (!*adj) {
    /* Worst-case bound on adjacency size from cone/support branching factors,
       capped by the total number of points in the chart */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart,&pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    /* Geometric series 1 + maxC + ... + maxC^depth (and likewise for maxS) */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;  /* each constrained point may be replaced by several anchors */
    asiz  = PetscMin(asiz,pEnd-pStart);
    PetscCall(PetscMalloc1(asiz,adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback takes precedence over all built-in adjacency rules */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained (anchored) point in the adjacency list with its
       anchor points, in place; the list is compacted as points are removed */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];  /* shadows the parameter p deliberately kept as-is */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        PetscCall(PetscSectionGetDof(aSec,p,&aDof));
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec,p,&aOff));
        /* Append each anchor not already present (append happens in the loop
           condition via the comma operator when q reaches numAdj) */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      }
      else {
        /* Only advance when nothing was removed at position i */
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS,&anchors));
  }
  PetscFunctionReturn(0);
}
264 
265 /*@
266   DMPlexGetAdjacency - Return all points adjacent to the given point
267 
268   Input Parameters:
269 + dm - The DM object
270 - p  - The point
271 
272   Input/Output Parameters:
273 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
274             on output the number of adjacent points
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
276         on output contains the adjacent points
277 
278   Level: advanced
279 
280   Notes:
281     The user must PetscFree the adj array if it was not passed in.
282 
283 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
284 @*/
285 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
286 {
287   PetscBool      useCone, useClosure, useAnchors;
288 
289   PetscFunctionBeginHot;
290   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
291   PetscValidIntPointer(adjSize,3);
292   PetscValidPointer(adj,4);
293   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
294   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
295   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
296   PetscFunctionReturn(0);
297 }
298 
299 /*@
300   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
301 
302   Collective on dm
303 
304   Input Parameters:
305 + dm      - The DM
306 . sfPoint - The PetscSF which encodes point connectivity
307 . rootRankSection -
308 . rootRanks -
309 . leftRankSection -
310 - leafRanks -
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330 
331   PetscFunctionBegin;
332   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
333   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
334   if (processRanks) {PetscValidPointer(processRanks, 7);}
335   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
336   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
337   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
338   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
339   PetscCall(PetscBTCreate(size, &neighbors));
340   PetscCall(PetscBTMemzero(size, neighbors));
341   /* Compute root-to-leaf process connectivity */
342   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
343   PetscCall(ISGetIndices(rootRanks, &nranks));
344   for (p = pStart; p < pEnd; ++p) {
345     PetscInt ndof, noff, n;
346 
347     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
348     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
349     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
350   }
351   PetscCall(ISRestoreIndices(rootRanks, &nranks));
352   /* Compute leaf-to-neighbor process connectivity */
353   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
354   PetscCall(ISGetIndices(leafRanks, &nranks));
355   for (p = pStart; p < pEnd; ++p) {
356     PetscInt ndof, noff, n;
357 
358     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
359     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
360     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
361   }
362   PetscCall(ISRestoreIndices(leafRanks, &nranks));
363   /* Compute leaf-to-root process connectivity */
364   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
365   /* Calculate edges */
366   PetscBTClear(neighbors, rank);
367   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
368   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
369   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
370   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
371   for (proc = 0, n = 0; proc < size; ++proc) {
372     if (PetscBTLookup(neighbors, proc)) {
373       ranksNew[n]              = proc;
374       localPointsNew[n]        = proc;
375       remotePointsNew[n].index = rank;
376       remotePointsNew[n].rank  = proc;
377       ++n;
378     }
379   }
380   PetscCall(PetscBTDestroy(&neighbors));
381   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
382   else              PetscCall(PetscFree(ranksNew));
383   if (sfProcess) {
384     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
385     PetscCall(PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF"));
386     PetscCall(PetscSFSetFromOptions(*sfProcess));
387     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
388   }
389   PetscFunctionReturn(0);
390 }
391 
392 /*@
393   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
394 
395   Collective on dm
396 
397   Input Parameter:
398 . dm - The DM
399 
400   Output Parameters:
401 + rootSection - The number of leaves for a given root point
402 . rootrank    - The rank of each edge into the root point
403 . leafSection - The number of processes sharing a given leaf point
404 - leafrank    - The rank of each process sharing a leaf point
405 
406   Level: developer
407 
408 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
409 @*/
410 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
411 {
412   MPI_Comm        comm;
413   PetscSF         sfPoint;
414   const PetscInt *rootdegree;
415   PetscInt       *myrank, *remoterank;
416   PetscInt        pStart, pEnd, p, nedges;
417   PetscMPIInt     rank;
418 
419   PetscFunctionBegin;
420   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
421   PetscCallMPI(MPI_Comm_rank(comm, &rank));
422   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
423   PetscCall(DMGetPointSF(dm, &sfPoint));
424   /* Compute number of leaves for each root */
425   PetscCall(PetscObjectSetName((PetscObject) rootSection, "Root Section"));
426   PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
427   PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
428   PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
429   for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]));
430   PetscCall(PetscSectionSetUp(rootSection));
431   /* Gather rank of each leaf to root */
432   PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
433   PetscCall(PetscMalloc1(pEnd-pStart, &myrank));
434   PetscCall(PetscMalloc1(nedges,  &remoterank));
435   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
436   PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
437   PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
438   PetscCall(PetscFree(myrank));
439   PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
440   /* Distribute remote ranks to leaves */
441   PetscCall(PetscObjectSetName((PetscObject) leafSection, "Leaf Section"));
442   PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
443   PetscFunctionReturn(0);
444 }
445 
#if 0
/* Copy the overlap-label bookkeeping (overlap labels, their values, and the
   exclusion label/value) from dm to ndm by looking labels up by name in ndm.
   Currently compiled out; retained for reference. */
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      /* Labels are matched by name in the new DM */
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(0);
}
#endif
470 
471 /*@C
472   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
473 
474   Collective on dm
475 
476   Input Parameters:
477 + dm          - The DM
478 . levels      - Number of overlap levels
479 . rootSection - The number of leaves for a given root point
480 . rootrank    - The rank of each edge into the root point
481 . leafSection - The number of processes sharing a given leaf point
482 - leafrank    - The rank of each process sharing a leaf point
483 
484   Output Parameter:
485 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
486 
487   Level: developer
488 
489 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
490 @*/
491 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
492 {
493   MPI_Comm           comm;
494   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
495   PetscSF            sfPoint;
496   const PetscSFNode *remote;
497   const PetscInt    *local;
498   const PetscInt    *nrank, *rrank;
499   PetscInt          *adj = NULL;
500   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
501   PetscMPIInt        rank, size;
502   PetscBool          flg;
503 
504   PetscFunctionBegin;
505   *ovLabel = NULL;
506   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
507   PetscCallMPI(MPI_Comm_size(comm, &size));
508   PetscCallMPI(MPI_Comm_rank(comm, &rank));
509   if (size == 1) PetscFunctionReturn(0);
510   PetscCall(DMGetPointSF(dm, &sfPoint));
511   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
512   if (!levels) {
513     /* Add owned points */
514     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
515     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
516     PetscFunctionReturn(0);
517   }
518   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
519   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
520   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
521   /* Handle leaves: shared with the root point */
522   for (l = 0; l < nleaves; ++l) {
523     PetscInt adjSize = PETSC_DETERMINE, a;
524 
525     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
526     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
527   }
528   PetscCall(ISGetIndices(rootrank, &rrank));
529   PetscCall(ISGetIndices(leafrank, &nrank));
530   /* Handle roots */
531   for (p = pStart; p < pEnd; ++p) {
532     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
533 
534     if ((p >= sStart) && (p < sEnd)) {
535       /* Some leaves share a root with other leaves on different processes */
536       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
537       if (neighbors) {
538         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
539         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
540         for (n = 0; n < neighbors; ++n) {
541           const PetscInt remoteRank = nrank[noff+n];
542 
543           if (remoteRank == rank) continue;
544           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
545         }
546       }
547     }
548     /* Roots are shared with leaves */
549     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
550     if (!neighbors) continue;
551     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
552     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
553     for (n = 0; n < neighbors; ++n) {
554       const PetscInt remoteRank = rrank[noff+n];
555 
556       if (remoteRank == rank) continue;
557       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
558     }
559   }
560   PetscCall(PetscFree(adj));
561   PetscCall(ISRestoreIndices(rootrank, &rrank));
562   PetscCall(ISRestoreIndices(leafrank, &nrank));
563   /* Add additional overlap levels */
564   for (l = 1; l < levels; l++) {
565     /* Propagate point donations over SF to capture remote connections */
566     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
567     /* Add next level of point donations to the label */
568     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
569   }
570   /* We require the closure in the overlap */
571   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
572   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
573   if (flg) {
574     PetscViewer viewer;
575     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
576     PetscCall(DMLabelView(ovAdjByRank, viewer));
577   }
578   /* Invert sender to receiver label */
579   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
580   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
581   /* Add owned points, except for shared local points */
582   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
583   for (l = 0; l < nleaves; ++l) {
584     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
585     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
586   }
587   /* Clean up */
588   PetscCall(DMLabelDestroy(&ovAdjByRank));
589   PetscFunctionReturn(0);
590 }
591 
592 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
593 {
594   PetscInt neighbors, el;
595 
596   PetscFunctionBegin;
597   PetscCall(PetscSectionGetDof(section, p, &neighbors));
598   if (neighbors) {
599     PetscInt   *adj = NULL;
600     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
601     PetscMPIInt rank;
602 
603     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
604     PetscCall(PetscSectionGetOffset(section, p, &noff));
605     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
606     for (n = 0; n < neighbors; ++n) {
607       const PetscInt remoteRank = ranks[noff+n];
608 
609       if (remoteRank == rank) continue;
610       for (a = 0; a < adjSize; ++a) {
611         PetscBool insert = PETSC_TRUE;
612 
613         for (el = 0; el < numExLabels; ++el) {
614           PetscInt exVal;
615           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
616           if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
617         }
618         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
619       }
620     }
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 /*@C
626   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
627 
628   Collective on dm
629 
630   Input Parameters:
631 + dm          - The DM
632 . numLabels   - The number of labels to draw candidate points from
633 . label       - An array of labels containing candidate points
634 . value       - An array of label values marking the candidate points
635 . numExLabels - The number of labels to use for exclusion
636 . exLabel     - An array of labels indicating points to be excluded, or NULL
637 . exValue     - An array of label values to be excluded, or NULL
638 . rootSection - The number of leaves for a given root point
639 . rootrank    - The rank of each edge into the root point
640 . leafSection - The number of processes sharing a given leaf point
641 - leafrank    - The rank of each process sharing a leaf point
642 
643   Output Parameter:
644 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
645 
646   Note:
647   The candidate points are only added to the overlap if they are adjacent to a shared point
648 
649   Level: developer
650 
651 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
652 @*/
653 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[],
654   PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
655 {
656   MPI_Comm           comm;
657   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
658   PetscSF            sfPoint;
659   const PetscSFNode *remote;
660   const PetscInt    *local;
661   const PetscInt    *nrank, *rrank;
662   PetscInt          *adj = NULL;
663   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
664   PetscMPIInt        rank, size;
665   PetscBool          flg;
666 
667   PetscFunctionBegin;
668   *ovLabel = NULL;
669   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
670   PetscCallMPI(MPI_Comm_size(comm, &size));
671   PetscCallMPI(MPI_Comm_rank(comm, &rank));
672   if (size == 1) PetscFunctionReturn(0);
673   PetscCall(DMGetPointSF(dm, &sfPoint));
674   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
675   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
676   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
677   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
678   PetscCall(ISGetIndices(rootrank, &rrank));
679   PetscCall(ISGetIndices(leafrank, &nrank));
680   for (l = 0; l < numLabels; ++l) {
681     IS              valIS;
682     const PetscInt *points;
683     PetscInt        n;
684 
685     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
686     if (!valIS) continue;
687     PetscCall(ISGetIndices(valIS, &points));
688     PetscCall(ISGetLocalSize(valIS, &n));
689     for (PetscInt i = 0; i < n; ++i) {
690       const PetscInt p = points[i];
691 
692       if ((p >= sStart) && (p < sEnd)) {
693         PetscInt loc, adjSize = PETSC_DETERMINE;
694 
695         /* Handle leaves: shared with the root point */
696         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
697         else       loc = (p >= 0 && p < nleaves) ? p : -1;
698         if (loc >= 0) {
699           const PetscInt remoteRank = remote[loc].rank;
700 
701           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
702           for (PetscInt a = 0; a < adjSize; ++a) {
703             PetscBool insert = PETSC_TRUE;
704 
705             for (el = 0; el < numExLabels; ++el) {
706               PetscInt exVal;
707               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
708               if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
709             }
710             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
711           }
712         }
713         /* Some leaves share a root with other leaves on different processes */
714         HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank);
715       }
716       /* Roots are shared with leaves */
717       HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank);
718     }
719     PetscCall(ISRestoreIndices(valIS, &points));
720     PetscCall(ISDestroy(&valIS));
721   }
722   PetscCall(PetscFree(adj));
723   PetscCall(ISRestoreIndices(rootrank, &rrank));
724   PetscCall(ISRestoreIndices(leafrank, &nrank));
725   /* We require the closure in the overlap */
726   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
727   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
728   if (flg) {
729     PetscViewer viewer;
730     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
731     PetscCall(DMLabelView(ovAdjByRank, viewer));
732   }
733   /* Invert sender to receiver label */
734   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
735   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
736   /* Add owned points, except for shared local points */
737   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
738   for (l = 0; l < nleaves; ++l) {
739     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
740     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
741   }
742   /* Clean up */
743   PetscCall(DMLabelDestroy(&ovAdjByRank));
744   PetscFunctionReturn(0);
745 }
746 
747 /*@C
748   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
749 
750   Collective on dm
751 
752   Input Parameters:
753 + dm          - The DM
754 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
755 
756   Output Parameter:
757 . migrationSF - An SF that maps original points in old locations to points in new locations
758 
759   Level: developer
760 
761 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
762 @*/
763 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
764 {
765   MPI_Comm           comm;
766   PetscMPIInt        rank, size;
767   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
768   PetscInt          *pointDepths, *remoteDepths, *ilocal;
769   PetscInt          *depthRecv, *depthShift, *depthIdx;
770   PetscSFNode       *iremote;
771   PetscSF            pointSF;
772   const PetscInt    *sharedLocal;
773   const PetscSFNode *overlapRemote, *sharedRemote;
774 
775   PetscFunctionBegin;
776   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
777   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
778   PetscCallMPI(MPI_Comm_rank(comm, &rank));
779   PetscCallMPI(MPI_Comm_size(comm, &size));
780   PetscCall(DMGetDimension(dm, &dim));
781 
782   /* Before building the migration SF we need to know the new stratum offsets */
783   PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
784   PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
785   for (d=0; d<dim+1; d++) {
786     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
787     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
788   }
789   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
790   PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));
791   PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));
792 
793   /* Count received points in each stratum and compute the internal strata shift */
794   PetscCall(PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx));
795   for (d=0; d<dim+1; d++) depthRecv[d]=0;
796   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
797   depthShift[dim] = 0;
798   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
799   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
800   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
801   for (d=0; d<dim+1; d++) {
802     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
803     depthIdx[d] = pStart + depthShift[d];
804   }
805 
806   /* Form the overlap SF build an SF that describes the full overlap migration SF */
807   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
808   newLeaves = pEnd - pStart + nleaves;
809   PetscCall(PetscMalloc1(newLeaves, &ilocal));
810   PetscCall(PetscMalloc1(newLeaves, &iremote));
811   /* First map local points to themselves */
812   for (d=0; d<dim+1; d++) {
813     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
814     for (p=pStart; p<pEnd; p++) {
815       point = p + depthShift[d];
816       ilocal[point] = point;
817       iremote[point].index = p;
818       iremote[point].rank = rank;
819       depthIdx[d]++;
820     }
821   }
822 
823   /* Add in the remote roots for currently shared points */
824   PetscCall(DMGetPointSF(dm, &pointSF));
825   PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
826   for (d=0; d<dim+1; d++) {
827     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
828     for (p=0; p<numSharedPoints; p++) {
829       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
830         point = sharedLocal[p] + depthShift[d];
831         iremote[point].index = sharedRemote[p].index;
832         iremote[point].rank = sharedRemote[p].rank;
833       }
834     }
835   }
836 
837   /* Now add the incoming overlap points */
838   for (p=0; p<nleaves; p++) {
839     point = depthIdx[remoteDepths[p]];
840     ilocal[point] = point;
841     iremote[point].index = overlapRemote[p].index;
842     iremote[point].rank = overlapRemote[p].rank;
843     depthIdx[remoteDepths[p]]++;
844   }
845   PetscCall(PetscFree2(pointDepths,remoteDepths));
846 
847   PetscCall(PetscSFCreate(comm, migrationSF));
848   PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF"));
849   PetscCall(PetscSFSetFromOptions(*migrationSF));
850   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
851   PetscCall(PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
852 
853   PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
854   PetscFunctionReturn(0);
855 }
856 
857 /*@
858   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
859 
860   Input Parameters:
861 + dm          - The DM
862 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
863 
864   Output Parameter:
865 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
866 
867   Note:
868   This lexicographically sorts by (depth, cellType)
869 
870   Level: developer
871 
872 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
873 @*/
874 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
875 {
876   MPI_Comm           comm;
877   PetscMPIInt        rank, size;
878   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
879   PetscSFNode       *pointDepths, *remoteDepths;
880   PetscInt          *ilocal;
881   PetscInt          *depthRecv, *depthShift, *depthIdx;
882   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
883   const PetscSFNode *iremote;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
887   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
888   PetscCallMPI(MPI_Comm_rank(comm, &rank));
889   PetscCallMPI(MPI_Comm_size(comm, &size));
890   PetscCall(DMPlexGetDepth(dm, &ldepth));
891   PetscCall(DMGetDimension(dm, &dim));
892   PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
893   PetscCheck(!(ldepth >= 0) || !(depth != ldepth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
894   PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0));
895 
896   /* Before building the migration SF we need to know the new stratum offsets */
897   PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
898   PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
899   for (d = 0; d < depth+1; ++d) {
900     PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
901     for (p = pStart; p < pEnd; ++p) {
902       DMPolytopeType ct;
903 
904       PetscCall(DMPlexGetCellType(dm, p, &ct));
905       pointDepths[p].index = d;
906       pointDepths[p].rank  = ct;
907     }
908   }
909   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
910   PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
911   PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
912   /* Count received points in each stratum and compute the internal strata shift */
913   PetscCall(PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
914   for (p = 0; p < nleaves; ++p) {
915     if (remoteDepths[p].rank < 0) {
916       ++depthRecv[remoteDepths[p].index];
917     } else {
918       ++ctRecv[remoteDepths[p].rank];
919     }
920   }
921   {
922     PetscInt depths[4], dims[4], shift = 0, i, c;
923 
924     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
925          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
926      */
927     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
928     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
929     for (i = 0; i <= depth; ++i) {
930       const PetscInt dep = depths[i];
931       const PetscInt dim = dims[i];
932 
933       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
934         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
935         ctShift[c] = shift;
936         shift     += ctRecv[c];
937       }
938       depthShift[dep] = shift;
939       shift          += depthRecv[dep];
940     }
941     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
942       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
943 
944       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
945         ctShift[c] = shift;
946         shift     += ctRecv[c];
947       }
948     }
949   }
950   /* Derive a new local permutation based on stratified indices */
951   PetscCall(PetscMalloc1(nleaves, &ilocal));
952   for (p = 0; p < nleaves; ++p) {
953     const PetscInt       dep = remoteDepths[p].index;
954     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
955 
956     if ((PetscInt) ct < 0) {
957       ilocal[p] = depthShift[dep] + depthIdx[dep];
958       ++depthIdx[dep];
959     } else {
960       ilocal[p] = ctShift[ct] + ctIdx[ct];
961       ++ctIdx[ct];
962     }
963   }
964   PetscCall(PetscSFCreate(comm, migrationSF));
965   PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Migration SF"));
966   PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode*)iremote, PETSC_COPY_VALUES));
967   PetscCall(PetscFree2(pointDepths,remoteDepths));
968   PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
969   PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0));
970   PetscFunctionReturn(0);
971 }
972 
973 /*@
974   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
975 
976   Collective on dm
977 
978   Input Parameters:
979 + dm - The DMPlex object
980 . pointSF - The PetscSF describing the communication pattern
981 . originalSection - The PetscSection for existing data layout
982 - originalVec - The existing data in a local vector
983 
984   Output Parameters:
985 + newSection - The PetscSection describing the new data layout
986 - newVec - The new data in a local vector
987 
988   Level: developer
989 
990 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
991 @*/
992 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
993 {
994   PetscSF        fieldSF;
995   PetscInt      *remoteOffsets, fieldSize;
996   PetscScalar   *originalValues, *newValues;
997 
998   PetscFunctionBegin;
999   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1000   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1001 
1002   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1003   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1004   PetscCall(VecSetType(newVec,dm->vectype));
1005 
1006   PetscCall(VecGetArray(originalVec, &originalValues));
1007   PetscCall(VecGetArray(newVec, &newValues));
1008   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1009   PetscCall(PetscFree(remoteOffsets));
1010   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1011   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1012   PetscCall(PetscSFDestroy(&fieldSF));
1013   PetscCall(VecRestoreArray(newVec, &newValues));
1014   PetscCall(VecRestoreArray(originalVec, &originalValues));
1015   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /*@
1020   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1021 
1022   Collective on dm
1023 
1024   Input Parameters:
1025 + dm - The DMPlex object
1026 . pointSF - The PetscSF describing the communication pattern
1027 . originalSection - The PetscSection for existing data layout
1028 - originalIS - The existing data
1029 
1030   Output Parameters:
1031 + newSection - The PetscSection describing the new data layout
1032 - newIS - The new data
1033 
1034   Level: developer
1035 
1036 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1037 @*/
1038 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1039 {
1040   PetscSF         fieldSF;
1041   PetscInt       *newValues, *remoteOffsets, fieldSize;
1042   const PetscInt *originalValues;
1043 
1044   PetscFunctionBegin;
1045   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1046   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1047 
1048   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1049   PetscCall(PetscMalloc1(fieldSize, &newValues));
1050 
1051   PetscCall(ISGetIndices(originalIS, &originalValues));
1052   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1053   PetscCall(PetscFree(remoteOffsets));
1054   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1055   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1056   PetscCall(PetscSFDestroy(&fieldSF));
1057   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1058   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1059   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1060   PetscFunctionReturn(0);
1061 }
1062 
1063 /*@
1064   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1065 
1066   Collective on dm
1067 
1068   Input Parameters:
1069 + dm - The DMPlex object
1070 . pointSF - The PetscSF describing the communication pattern
1071 . originalSection - The PetscSection for existing data layout
1072 . datatype - The type of data
1073 - originalData - The existing data
1074 
1075   Output Parameters:
1076 + newSection - The PetscSection describing the new data layout
1077 - newData - The new data
1078 
1079   Level: developer
1080 
1081 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1082 @*/
1083 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1084 {
1085   PetscSF        fieldSF;
1086   PetscInt      *remoteOffsets, fieldSize;
1087   PetscMPIInt    dataSize;
1088 
1089   PetscFunctionBegin;
1090   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0));
1091   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1092 
1093   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1094   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1095   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1096 
1097   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1098   PetscCall(PetscFree(remoteOffsets));
1099   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1100   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1101   PetscCall(PetscSFDestroy(&fieldSF));
1102   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0));
1103   PetscFunctionReturn(0);
1104 }
1105 
/* Distribute the cone section, cone points, and cone orientations of dm onto
   dmParallel along the migration SF, then symmetrize and stratify the result.

   Input Parameters:
   + dm          - The original DM
   . migrationSF - SF mapping original points to their destinations
   . original    - Local-to-global map for the original points, or NULL if the
                   cones are already in a global numbering
   - renumbering - Global-to-local map used to renumber received cone points

   Output Parameter:
   . dmParallel  - The redistributed DM; its cones, supports, strata, and
                   adjacency flags are set up here */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate cone points into the global numbering before communication */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection,&numCones));
    PetscCall(PetscMalloc1(numCones,&globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* Cones are already global; send them as-is */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points back to the new local numbering; unmapped
     points become negative (masked) and are caught by the debug check below */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank,p));}
    }
    PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Cone orientations use the same layout SF; no renumbering needed */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Copy the adjacency configuration over to the new DM */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(0);
}
1185 
1186 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1187 {
1188   MPI_Comm         comm;
1189   DM               cdm, cdmParallel;
1190   PetscSection     originalCoordSection, newCoordSection;
1191   Vec              originalCoordinates, newCoordinates;
1192   PetscInt         bs;
1193   const char      *name;
1194   const PetscReal *maxCell, *L;
1195 
1196   PetscFunctionBegin;
1197   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1198   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1199 
1200   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1201   PetscCall(DMGetCoordinateDM(dm, &cdm));
1202   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1203   PetscCall(DMCopyDisc(cdm, cdmParallel));
1204   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1205   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1206   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1207   if (originalCoordinates) {
1208     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1209     PetscCall(PetscObjectGetName((PetscObject) originalCoordinates, &name));
1210     PetscCall(PetscObjectSetName((PetscObject) newCoordinates, name));
1211 
1212     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1213     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1214     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1215     PetscCall(VecSetBlockSize(newCoordinates, bs));
1216     PetscCall(VecDestroy(&newCoordinates));
1217   }
1218 
1219   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1220   PetscCall(DMGetPeriodicity(dm, &maxCell, &L));
1221   PetscCall(DMSetPeriodicity(dmParallel, maxCell, L));
1222   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1223   if (cdm) {
1224     PetscCall(DMClone(dmParallel, &cdmParallel));
1225     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1226     PetscCall(DMCopyDisc(cdm, cdmParallel));
1227     PetscCall(DMDestroy(&cdmParallel));
1228     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1229     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1230     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1231     if (originalCoordinates) {
1232       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1233       PetscCall(PetscObjectGetName((PetscObject) originalCoordinates, &name));
1234       PetscCall(PetscObjectSetName((PetscObject) newCoordinates, name));
1235 
1236       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1237       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1238       PetscCall(VecSetBlockSize(newCoordinates, bs));
1239       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1240       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1241       PetscCall(VecDestroy(&newCoordinates));
1242     }
1243     PetscCall(PetscSectionDestroy(&newCoordSection));
1244   }
1245   PetscFunctionReturn(0);
1246 }
1247 
/* Distribute every label of dm onto dmParallel along the migration SF.
   The "depth" label is normally recreated by stratification and skipped, but
   if the user has modified it (detected via its object state) it is
   communicated instead. Collective: the Bcast/Allreduce calls here must be
   matched on every rank, so label iteration is driven by rank 0's count. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject) depthLabel, &depthState));
  /* mesh->depthState caches the state at which the depth label was created */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Remove the automatically created depth label so the distributed one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject) label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Rank 0 always has hasLabels true, so its isDepth value is defined here */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label; output only if every rank agrees */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject) labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscFunctionReturn(0);
}
1319 
/* Distribute the reference tree and the parent/child relationships of dm onto
   dmParallel along the migration SF. Parent points are translated to a global
   numbering with 'original' (if given), communicated, and mapped back to the
   new local numbering with 'renumbering'; child IDs travel unchanged. */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm,&refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel,refTree));
  PetscCall(DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL));
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    /* Distribute the parent section over the migration SF */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection,pStart,pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection,&newParentSize));
    PetscCall(PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs));
    if (original) {
      /* Translate parent points to the global numbering before sending */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection,&numParents));
      PetscCall(PetscMalloc1(numParents,&globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    }
    else {
      /* Parents are already global; send them as-is */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    /* Map received global parents back to the new local numbering; unmapped
       points become negative (masked) and are caught by the debug check below */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));}
      }
      PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents,newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1393 
/*
  DMPlexDistributeSF - Build the point SF of dmParallel from the given migration SF.

  Each leaf of the migration SF bids for ownership of its point with its own rank; an
  MPI_MAXLOC reduction then awards every shared point to the highest-ranked bidder, and
  all losing copies become ghost leaves of the new point SF. Marked PETSC_UNUSED --
  NOTE(review): this role appears to be played by DMPlexCreatePointSF() now; confirm
  before deleting.
*/
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots,&rowners,numLeaves,&lowners));
    /* Initialize all root ownership to "unclaimed" (-1) and push it down to the leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* MAXLOC picks the winning (rank, index) bid per root, then broadcast the winner to all leaves */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Every leaf whose owner is another rank becomes a ghost point of the new point SF */
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners,lowners));
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  {
    /* Carry the adjacency configuration over to the redistributed DM */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0));
  PetscFunctionReturn(0);
}
1469 
1470 /*@
1471   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1472 
1473   Input Parameters:
1474 + dm - The DMPlex object
1475 - flg - Balance the partition?
1476 
1477   Level: intermediate
1478 
1479 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1480 @*/
1481 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1482 {
1483   DM_Plex *mesh = (DM_Plex *)dm->data;
1484 
1485   PetscFunctionBegin;
1486   mesh->partitionBalance = flg;
1487   PetscFunctionReturn(0);
1488 }
1489 
1490 /*@
1491   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1492 
1493   Input Parameter:
1494 . dm - The DMPlex object
1495 
1496   Output Parameter:
1497 . flg - Balance the partition?
1498 
1499   Level: intermediate
1500 
1501 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1502 @*/
1503 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1504 {
1505   DM_Plex *mesh = (DM_Plex *)dm->data;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1509   PetscValidBoolPointer(flg, 2);
1510   *flg = mesh->partitionBalance;
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 typedef struct {
1515   PetscInt vote, rank, index;
1516 } Petsc3Int;
1517 
1518 /* MaxLoc, but carry a third piece of information around */
1519 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1520 {
1521   Petsc3Int *a = (Petsc3Int *)inout_;
1522   Petsc3Int *b = (Petsc3Int *)in_;
1523   PetscInt i, len = *len_;
1524   for (i = 0; i < len; i++) {
1525     if (a[i].vote < b[i].vote) {
1526       a[i].vote = b[i].vote;
1527       a[i].rank = b[i].rank;
1528       a[i].index = b[i].index;
1529     } else if (a[i].vote <= b[i].vote) {
1530       if (a[i].rank >= b[i].rank) {
1531         a[i].rank = b[i].rank;
1532         a[i].index = b[i].index;
1533       }
1534     }
1535   }
1536 }
1537 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Notes:
  Output pointSF is guaranteed to have the array of local indices (leaves) sorted.

  With ownership == PETSC_TRUE, each rank bids for its shared points via a user-defined
  MAXLOC reduction (MaxLocCarry); with partition balancing on, the bid is the rank plus a
  random cyclic shift so ownership spreads across ranks. With ownership == PETSC_FALSE,
  the current owner recorded in the migration SF roots is kept.

  Level: developer

.seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                 /* table of random shifts; only created when balancing */
  const PetscInt     numShifts = 13759;      /* table size; remote indices are hashed mod this */
  const PetscScalar *shift = NULL;           /* non-NULL iff balancing; also guards cleanup below */
  const PetscBool    shiftDebug = PETSC_FALSE; /* compile-time switch for verbose vote tracing */
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467*size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce the per-leaf bids onto the roots with the vote-carrying MAXLOC */
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    /* Keep only the winning (rank, index); the vote itself is no longer needed */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: current owners keep their points; record the new local numbering at the roots */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Tell every leaf who owns its point and under which local index */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));

  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  /* shift is non-NULL only when the balancing branch created the shifts Vec above */
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF));
  PetscFunctionReturn(0);
}
1669 
1670 /*@C
1671   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1672 
1673   Collective on dm
1674 
1675   Input Parameters:
1676 + dm       - The source DMPlex object
1677 - sf       - The star forest communication context describing the migration pattern
1678 
1679   Output Parameter:
1680 . targetDM - The target DMPlex object
1681 
1682   Level: intermediate
1683 
1684 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1685 @*/
1686 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1687 {
1688   MPI_Comm               comm;
1689   PetscInt               dim, cdim, nroots;
1690   PetscSF                sfPoint;
1691   ISLocalToGlobalMapping ltogMigration;
1692   ISLocalToGlobalMapping ltogOriginal = NULL;
1693 
1694   PetscFunctionBegin;
1695   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1696   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1697   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
1698   PetscCall(DMGetDimension(dm, &dim));
1699   PetscCall(DMSetDimension(targetDM, dim));
1700   PetscCall(DMGetCoordinateDim(dm, &cdim));
1701   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1702 
1703   /* Check for a one-to-all distribution pattern */
1704   PetscCall(DMGetPointSF(dm, &sfPoint));
1705   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1706   if (nroots >= 0) {
1707     IS        isOriginal;
1708     PetscInt  n, size, nleaves;
1709     PetscInt  *numbering_orig, *numbering_new;
1710 
1711     /* Get the original point numbering */
1712     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1713     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1714     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1715     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1716     /* Convert to positive global numbers */
1717     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1718     /* Derive the new local-to-global mapping from the old one */
1719     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1720     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1721     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1722     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1723     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1724     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1725     PetscCall(ISDestroy(&isOriginal));
1726   } else {
1727     /* One-to-all distribution pattern: We can derive LToG from SF */
1728     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1729   }
1730   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1731   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1732   /* Migrate DM data to target DM */
1733   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1734   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1735   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1736   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1737   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1738   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1739   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1740   PetscFunctionReturn(0);
1741 }
1742 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to distribute in serial; outputs stay NULL as documented */
  if (size == 1) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0));
  {
    /* Convert partition to DMLabel: one stratum per target rank, holding the closure of its cells */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: collect the set of ranks that receive at least one cell */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure() */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points+poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0));

  /* Invert the partition label into a migration label and build the (stratified) migration SF */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM; the non-overlapping DM is replaced */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    /* oldRemote points at permRemote iff oldLeaves was non-NULL */
    if (oldLeaves) PetscCall(PetscFree(oldRemote));
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMGetLocalSection(dm, &section));
    PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: hand the migration SF to the caller if requested, otherwise destroy it */
  if (sf) {*sf = sfMigration;}
  else    PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0));
  PetscFunctionReturn(0);
}
1923 
1924 /*@C
1925   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1926 
1927   Collective on dm
1928 
1929   Input Parameters:
1930 + dm  - The non-overlapping distributed DMPlex object
1931 - overlap - The overlap of partitions (the same on all ranks)
1932 
1933   Output Parameters:
1934 + sf - The PetscSF used for point distribution
1935 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1936 
1937   Notes:
1938   If the mesh was not distributed, the return value is NULL.
1939 
1940   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1941   representation on the mesh.
1942 
1943   Options Database Keys:
1944 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1945 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1946 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1947 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1948 
1949   Level: advanced
1950 
1951 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
1952 @*/
1953 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1954 {
1955   DM_Plex     *mesh = (DM_Plex *) dm->data;
1956   MPI_Comm     comm;
1957   PetscMPIInt  size, rank;
1958   PetscSection rootSection, leafSection;
1959   IS           rootrank, leafrank;
1960   DM           dmCoord;
1961   DMLabel      lblOverlap;
1962   PetscSF      sfOverlap, sfStratified, sfPoint;
1963 
1964   PetscFunctionBegin;
1965   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1966   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1967   if (sf) PetscValidPointer(sf, 3);
1968   PetscValidPointer(dmOverlap, 4);
1969 
1970   if (sf) *sf = NULL;
1971   *dmOverlap  = NULL;
1972   PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
1973   PetscCallMPI(MPI_Comm_size(comm, &size));
1974   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1975   if (size == 1) PetscFunctionReturn(0);
1976   {
1977     // We need to get options for the _already_distributed mesh, so it must be done here
1978     PetscInt    overlap;
1979     const char *prefix;
1980     char        oldPrefix[PETSC_MAX_PATH_LEN];
1981 
1982     oldPrefix[0] = '\0';
1983     PetscCall(PetscObjectGetOptionsPrefix((PetscObject) dm, &prefix));
1984     PetscCall(PetscStrcpy(oldPrefix, prefix));
1985     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject) dm, "dist_"));
1986     PetscCall(DMPlexGetOverlap(dm, &overlap));
1987     PetscObjectOptionsBegin((PetscObject) dm);
1988     PetscCall(DMSetFromOptions_Overlap_Plex(PetscOptionsObject, dm, &overlap));
1989     PetscOptionsEnd();
1990     PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
1991   }
1992   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
1993   /* Compute point overlap with neighbouring processes on the distributed DM */
1994   PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
1995   PetscCall(PetscSectionCreate(comm, &rootSection));
1996   PetscCall(PetscSectionCreate(comm, &leafSection));
1997   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
1998   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
1999   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
2000   /* Convert overlap label to stratified migration SF */
2001   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
2002   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
2003   PetscCall(PetscSFDestroy(&sfOverlap));
2004   sfOverlap = sfStratified;
2005   PetscCall(PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF"));
2006   PetscCall(PetscSFSetFromOptions(sfOverlap));
2007 
2008   PetscCall(PetscSectionDestroy(&rootSection));
2009   PetscCall(PetscSectionDestroy(&leafSection));
2010   PetscCall(ISDestroy(&rootrank));
2011   PetscCall(ISDestroy(&leafrank));
2012   PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));
2013 
2014   /* Build the overlapping DM */
2015   PetscCall(DMPlexCreate(comm, dmOverlap));
2016   PetscCall(PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh"));
2017   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
2018   /* Store the overlap in the new DM */
2019   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
2020   /* Build the new point SF */
2021   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
2022   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
2023   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
2024   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2025   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
2026   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2027   PetscCall(PetscSFDestroy(&sfPoint));
2028   /* Cleanup overlap partition */
2029   PetscCall(DMLabelDestroy(&lblOverlap));
2030   if (sf) *sf = sfOverlap;
2031   else    PetscCall(PetscSFDestroy(&sfOverlap));
2032   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2033   PetscFunctionReturn(0);
2034 }
2035 
2036 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2037 {
2038   DM_Plex        *mesh  = (DM_Plex*) dm->data;
2039 
2040   PetscFunctionBegin;
2041   *overlap = mesh->overlap;
2042   PetscFunctionReturn(0);
2043 }
2044 
2045 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2046 {
2047   DM_Plex        *mesh=NULL;
2048   DM_Plex        *meshSrc=NULL;
2049 
2050   PetscFunctionBegin;
2051   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2052   mesh = (DM_Plex*) dm->data;
2053   mesh->overlap = overlap;
2054   if (dmSrc) {
2055     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2056     meshSrc = (DM_Plex*) dmSrc->data;
2057     mesh->overlap += meshSrc->overlap;
2058   }
2059   PetscFunctionReturn(0);
2060 }
2061 
/*@
  DMPlexGetOverlap - Get the width of the cell overlap

  Not collective

  Input Parameter:
. dm   - The DM

  Output Parameter:
. overlap - the width of the cell overlap

  Level: intermediate

.seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(overlap, 2);
  /* Dispatch to the implementation composed as "DMPlexGetOverlap_C" (errors if absent) */
  PetscUseMethod(dm, "DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));
  PetscFunctionReturn(0);
}
2085 
/*@
  DMPlexSetOverlap - Set the width of the cell overlap

  Logically collective

  Input Parameters:
+ dm      - The DM
. dmSrc   - The DM that produced this one, or NULL
- overlap - the width of the cell overlap

  Note:
  The overlap from dmSrc is added to dm

  Level: intermediate

.seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 3);
  /* Dispatch to the implementation composed as "DMPlexSetOverlap_C" (no-op if absent) */
  PetscTryMethod(dm, "DMPlexSetOverlap_C",(DM,DM,PetscInt),(dm,dmSrc,overlap));
  PetscFunctionReturn(0);
}
2111 
2112 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2113 {
2114   DM_Plex *mesh = (DM_Plex *) dm->data;
2115 
2116   PetscFunctionBegin;
2117   mesh->distDefault = dist;
2118   PetscFunctionReturn(0);
2119 }
2120 
/*@
  DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default

  Logically collective

  Input Parameters:
+ dm   - The DM
- dist - Flag for distribution

  Level: intermediate

.seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveBool(dm, dist, 2);
  /* Dispatch to the implementation composed as "DMPlexDistributeSetDefault_C" (no-op if absent) */
  PetscTryMethod(dm,"DMPlexDistributeSetDefault_C",(DM,PetscBool),(dm,dist));
  PetscFunctionReturn(0);
}
2142 
2143 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2144 {
2145   DM_Plex *mesh = (DM_Plex *) dm->data;
2146 
2147   PetscFunctionBegin;
2148   *dist = mesh->distDefault;
2149   PetscFunctionReturn(0);
2150 }
2151 
2152 /*@
2153   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2154 
2155   Not collective
2156 
2157   Input Parameter:
2158 . dm   - The DM
2159 
2160   Output Parameter:
2161 . dist - Flag for distribution
2162 
2163   Level: intermediate
2164 
2165 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2166 @*/
2167 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2168 {
2169   PetscFunctionBegin;
2170   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2171   PetscValidBoolPointer(dist, 2);
2172   PetscUseMethod(dm,"DMPlexDistributeGetDefault_C",(DM,PetscBool*),(dm,dist));
2173   PetscFunctionReturn(0);
2174 }
2175 
/*@C
  DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
  root process of the original's communicator.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- gatherMesh - the gathered DM object, or NULL

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
@*/
PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size;
  PetscPartitioner oldPart, gatherPart;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gatherMesh,3);
  *gatherMesh = NULL;
  if (sf) *sf = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm,&size));
  /* On a single process everything already lives on the root: return NULL outputs */
  if (size == 1) PetscFunctionReturn(0);
  /* Temporarily swap in a GATHER partitioner, distribute, then restore the
     original partitioner.  The extra reference keeps oldPart alive while it
     is detached from dm. */
  PetscCall(DMPlexGetPartitioner(dm,&oldPart));
  PetscCall(PetscObjectReference((PetscObject)oldPart));
  PetscCall(PetscPartitionerCreate(comm,&gatherPart));
  PetscCall(PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER));
  PetscCall(DMPlexSetPartitioner(dm,gatherPart));
  /* Overlap 0: no ghost cells are needed for a pure gather */
  PetscCall(DMPlexDistribute(dm,0,sf,gatherMesh));

  /* Put the original partitioner back and drop our temporary references */
  PetscCall(DMPlexSetPartitioner(dm,oldPart));
  PetscCall(PetscPartitionerDestroy(&gatherPart));
  PetscCall(PetscPartitionerDestroy(&oldPart));
  PetscFunctionReturn(0);
}
2219 
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint, gatherSF;
  DM             gatherDM, dmCoord;
  PetscSFNode    *points;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(redundantMesh,3);
  *redundantMesh = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm,&size));
  /* Serial: the mesh is already redundant, so just return a reference to it */
  if (size == 1) {
    PetscCall(PetscObjectReference((PetscObject) dm));
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(0);
  }
  /* Step 1: gather the whole mesh onto rank 0 */
  PetscCall(DMPlexGetGatherDM(dm,&gatherSF,&gatherDM));
  if (!gatherDM) PetscFunctionReturn(0);
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(DMPlexGetChart(gatherDM,&pStart,&pEnd));
  /* Only rank 0 holds the gathered mesh, so broadcast its point count */
  numPoints = pEnd - pStart;
  PetscCallMPI(MPI_Bcast(&numPoints,1,MPIU_INT,0,comm));
  PetscCall(PetscMalloc1(numPoints,&points));
  PetscCall(PetscSFCreate(comm,&migrationSF));
  /* Step 2: build a broadcast SF: every rank's leaf p maps to root point p on rank 0 */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  /* nroots = pEnd-pStart is numPoints on rank 0 and 0 elsewhere; the SF takes
     ownership of the points array (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER));
  /* Step 3: migrate the gathered mesh to every rank along that SF */
  PetscCall(DMPlexCreate(comm, redundantMesh));
  PetscCall(PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh"));
  PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
  /* This is to express that all point are in overlap */
  PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
  /* Step 4: rebuild the point SF (and share it with the coordinate DM) */
  PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
  PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Optionally return the composed original->redundant SF, stratified to
     match the redundant mesh's point numbering */
  if (sf) {
    PetscSF tsf;

    PetscCall(PetscSFCompose(gatherSF,migrationSF,&tsf));
    PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
    PetscCall(PetscSFDestroy(&tsf));
  }
  PetscCall(PetscSFDestroy(&migrationSF));
  PetscCall(PetscSFDestroy(&gatherSF));
  PetscCall(DMDestroy(&gatherDM));
  /* Carry over discretization and other Plex metadata from the original */
  PetscCall(DMCopyDisc(dm, *redundantMesh));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
  PetscFunctionReturn(0);
}
2295 
2296 /*@
2297   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2298 
2299   Collective
2300 
2301   Input Parameter:
2302 . dm      - The DM object
2303 
2304   Output Parameter:
2305 . distributed - Flag whether the DM is distributed
2306 
2307   Level: intermediate
2308 
2309   Notes:
2310   This currently finds out whether at least two ranks have any DAG points.
2311   This involves MPI_Allreduce() with one integer.
2312   The result is currently not stashed so every call to this routine involves this global communication.
2313 
2314 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2315 @*/
2316 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2317 {
2318   PetscInt          pStart, pEnd, count;
2319   MPI_Comm          comm;
2320   PetscMPIInt       size;
2321 
2322   PetscFunctionBegin;
2323   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2324   PetscValidBoolPointer(distributed,2);
2325   PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
2326   PetscCallMPI(MPI_Comm_size(comm,&size));
2327   if (size == 1) { *distributed = PETSC_FALSE; PetscFunctionReturn(0); }
2328   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2329   count = (pEnd - pStart) > 0 ? 1 : 0;
2330   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2331   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2332   PetscFunctionReturn(0);
2333 }
2334