xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision bbcf679c7ce315810b13005931dd3a7fb75b5053)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(PETSC_SUCCESS);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(PETSC_SUCCESS);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(PETSC_SUCCESS);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(PETSC_SUCCESS);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(PETSC_SUCCESS);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(PETSC_SUCCESS);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(PETSC_SUCCESS);
177 }
178 
/* DMPlexGetAdjacency_Internal - Compute the points adjacent to p, dispatching on the
   adjacency definition (user callback, transitive closure, cone, or support), then
   optionally replacing constrained (anchored) points by their anchor points.

   On input, *adj == NULL requests allocation of a workspace; its size is remembered
   in the function-static 'asiz'.  *adjSize < 0 requests the full workspace size.

   NOTE(review): because 'asiz' is static, a caller that passes a preallocated *adj
   together with *adjSize < 0 gets the size computed by a PREVIOUS call (possibly on
   a different DM), and the function is not reentrant/thread-safe -- confirm callers
   never rely on that path concurrently. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz       = 0;  /* size of the most recently allocated workspace */
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    /* Size the workspace by bounding closure/star cardinality with geometric series
       in the maximum cone/support sizes, then clamp to the chart size */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
    asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
    asiz *= maxAnchors;  /* each point may be replaced by up to maxAnchors anchors */
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency definition; a user callback overrides everything */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Substitute anchors for constrained points, compacting the array in place */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i];  /* NB: shadows the function parameter p */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left ... */
        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        /* ... then append its anchors, deduplicating against the current list */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++;  /* orig[i] is unconstrained; keep it and advance */
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
259 
260 /*@
261   DMPlexGetAdjacency - Return all points adjacent to the given point
262 
263   Input Parameters:
264 + dm - The DM object
265 - p  - The point
266 
267   Input/Output Parameters:
268 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
269             on output the number of adjacent points
270 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
271         on output contains the adjacent points
272 
273   Level: advanced
274 
275   Notes:
276     The user must PetscFree the adj array if it was not passed in.
277 
278 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
279 @*/
280 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
281 {
282   PetscBool useCone, useClosure, useAnchors;
283 
284   PetscFunctionBeginHot;
285   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
286   PetscValidIntPointer(adjSize, 3);
287   PetscValidPointer(adj, 4);
288   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
289   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
290   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
291   PetscFunctionReturn(PETSC_SUCCESS);
292 }
293 
294 /*@
295   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
296 
297   Collective on dm
298 
299   Input Parameters:
300 + dm      - The DM
301 . sfPoint - The PetscSF which encodes point connectivity
302 . rootRankSection -
303 . rootRanks -
304 . leftRankSection -
305 - leafRanks -
306 
307   Output Parameters:
308 + processRanks - A list of process neighbors, or NULL
309 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
310 
311   Level: developer
312 
313 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
314 @*/
315 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
316 {
317   const PetscSFNode *remotePoints;
318   PetscInt          *localPointsNew;
319   PetscSFNode       *remotePointsNew;
320   const PetscInt    *nranks;
321   PetscInt          *ranksNew;
322   PetscBT            neighbors;
323   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
324   PetscMPIInt        size, proc, rank;
325 
326   PetscFunctionBegin;
327   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
328   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
329   if (processRanks) PetscValidPointer(processRanks, 7);
330   if (sfProcess) PetscValidPointer(sfProcess, 8);
331   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
332   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
333   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
334   PetscCall(PetscBTCreate(size, &neighbors));
335   PetscCall(PetscBTMemzero(size, neighbors));
336   /* Compute root-to-leaf process connectivity */
337   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
338   PetscCall(ISGetIndices(rootRanks, &nranks));
339   for (p = pStart; p < pEnd; ++p) {
340     PetscInt ndof, noff, n;
341 
342     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
343     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
344     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
345   }
346   PetscCall(ISRestoreIndices(rootRanks, &nranks));
347   /* Compute leaf-to-neighbor process connectivity */
348   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
349   PetscCall(ISGetIndices(leafRanks, &nranks));
350   for (p = pStart; p < pEnd; ++p) {
351     PetscInt ndof, noff, n;
352 
353     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
354     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
355     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
356   }
357   PetscCall(ISRestoreIndices(leafRanks, &nranks));
358   /* Compute leaf-to-root process connectivity */
359   for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
360   /* Calculate edges */
361   PetscCall(PetscBTClear(neighbors, rank));
362   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
363     if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
364   }
365   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
366   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
367   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
368   for (proc = 0, n = 0; proc < size; ++proc) {
369     if (PetscBTLookup(neighbors, proc)) {
370       ranksNew[n]              = proc;
371       localPointsNew[n]        = proc;
372       remotePointsNew[n].index = rank;
373       remotePointsNew[n].rank  = proc;
374       ++n;
375     }
376   }
377   PetscCall(PetscBTDestroy(&neighbors));
378   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
379   else PetscCall(PetscFree(ranksNew));
380   if (sfProcess) {
381     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
382     PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
383     PetscCall(PetscSFSetFromOptions(*sfProcess));
384     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
385   }
386   PetscFunctionReturn(PETSC_SUCCESS);
387 }
388 
389 /*@
390   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
391 
392   Collective on dm
393 
394   Input Parameter:
395 . dm - The DM
396 
397   Output Parameters:
398 + rootSection - The number of leaves for a given root point
399 . rootrank    - The rank of each edge into the root point
400 . leafSection - The number of processes sharing a given leaf point
401 - leafrank    - The rank of each process sharing a leaf point
402 
403   Level: developer
404 
405 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
406 @*/
407 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
408 {
409   MPI_Comm        comm;
410   PetscSF         sfPoint;
411   const PetscInt *rootdegree;
412   PetscInt       *myrank, *remoterank;
413   PetscInt        pStart, pEnd, p, nedges;
414   PetscMPIInt     rank;
415 
416   PetscFunctionBegin;
417   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
418   PetscCallMPI(MPI_Comm_rank(comm, &rank));
419   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
420   PetscCall(DMGetPointSF(dm, &sfPoint));
421   /* Compute number of leaves for each root */
422   PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
423   PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
424   PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
425   PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
426   for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
427   PetscCall(PetscSectionSetUp(rootSection));
428   /* Gather rank of each leaf to root */
429   PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
430   PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
431   PetscCall(PetscMalloc1(nedges, &remoterank));
432   for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
433   PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
434   PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
435   PetscCall(PetscFree(myrank));
436   PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
437   /* Distribute remote ranks to leaves */
438   PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
439   PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
440   PetscFunctionReturn(PETSC_SUCCESS);
441 }
442 
#if 0
/* Currently disabled: copy the overlap-label configuration (ovLabels/ovValues and
   the exclusion label) from dm to ndm by looking labels up by name on ndm.
   NOTE(review): assumes ndm already owns labels with the same names and that
   nmesh->ovLabels has sufficient capacity -- confirm before enabling. */
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
467 
468 /*@C
469   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
470 
471   Collective on dm
472 
473   Input Parameters:
474 + dm          - The DM
475 . levels      - Number of overlap levels
476 . rootSection - The number of leaves for a given root point
477 . rootrank    - The rank of each edge into the root point
478 . leafSection - The number of processes sharing a given leaf point
479 - leafrank    - The rank of each process sharing a leaf point
480 
481   Output Parameter:
482 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
483 
484   Level: developer
485 
486 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
487 @*/
488 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
489 {
490   MPI_Comm           comm;
491   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
492   PetscSF            sfPoint;
493   const PetscSFNode *remote;
494   const PetscInt    *local;
495   const PetscInt    *nrank, *rrank;
496   PetscInt          *adj = NULL;
497   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
498   PetscMPIInt        rank, size;
499   PetscBool          flg;
500 
501   PetscFunctionBegin;
502   *ovLabel = NULL;
503   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
504   PetscCallMPI(MPI_Comm_size(comm, &size));
505   PetscCallMPI(MPI_Comm_rank(comm, &rank));
506   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
507   PetscCall(DMGetPointSF(dm, &sfPoint));
508   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
509   if (!levels) {
510     /* Add owned points */
511     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
512     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
513     PetscFunctionReturn(PETSC_SUCCESS);
514   }
515   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
516   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
517   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
518   /* Handle leaves: shared with the root point */
519   for (l = 0; l < nleaves; ++l) {
520     PetscInt adjSize = PETSC_DETERMINE, a;
521 
522     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
523     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
524   }
525   PetscCall(ISGetIndices(rootrank, &rrank));
526   PetscCall(ISGetIndices(leafrank, &nrank));
527   /* Handle roots */
528   for (p = pStart; p < pEnd; ++p) {
529     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
530 
531     if ((p >= sStart) && (p < sEnd)) {
532       /* Some leaves share a root with other leaves on different processes */
533       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
534       if (neighbors) {
535         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
536         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
537         for (n = 0; n < neighbors; ++n) {
538           const PetscInt remoteRank = nrank[noff + n];
539 
540           if (remoteRank == rank) continue;
541           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
542         }
543       }
544     }
545     /* Roots are shared with leaves */
546     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
547     if (!neighbors) continue;
548     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
549     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
550     for (n = 0; n < neighbors; ++n) {
551       const PetscInt remoteRank = rrank[noff + n];
552 
553       if (remoteRank == rank) continue;
554       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
555     }
556   }
557   PetscCall(PetscFree(adj));
558   PetscCall(ISRestoreIndices(rootrank, &rrank));
559   PetscCall(ISRestoreIndices(leafrank, &nrank));
560   /* Add additional overlap levels */
561   for (l = 1; l < levels; l++) {
562     /* Propagate point donations over SF to capture remote connections */
563     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
564     /* Add next level of point donations to the label */
565     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
566   }
567   /* We require the closure in the overlap */
568   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
569   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
570   if (flg) {
571     PetscViewer viewer;
572     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
573     PetscCall(DMLabelView(ovAdjByRank, viewer));
574   }
575   /* Invert sender to receiver label */
576   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
577   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
578   /* Add owned points, except for shared local points */
579   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
580   for (l = 0; l < nleaves; ++l) {
581     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
582     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
583   }
584   /* Clean up */
585   PetscCall(DMLabelDestroy(&ovAdjByRank));
586   PetscFunctionReturn(PETSC_SUCCESS);
587 }
588 
589 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
590 {
591   PetscInt neighbors, el;
592 
593   PetscFunctionBegin;
594   PetscCall(PetscSectionGetDof(section, p, &neighbors));
595   if (neighbors) {
596     PetscInt   *adj     = NULL;
597     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
598     PetscMPIInt rank;
599 
600     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
601     PetscCall(PetscSectionGetOffset(section, p, &noff));
602     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
603     for (n = 0; n < neighbors; ++n) {
604       const PetscInt remoteRank = ranks[noff + n];
605 
606       if (remoteRank == rank) continue;
607       for (a = 0; a < adjSize; ++a) {
608         PetscBool insert = PETSC_TRUE;
609 
610         for (el = 0; el < numExLabels; ++el) {
611           PetscInt exVal;
612           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
613           if (exVal == exValue[el]) {
614             insert = PETSC_FALSE;
615             break;
616           }
617         }
618         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
619       }
620     }
621     PetscCall(PetscFree(adj));
622   }
623   PetscFunctionReturn(PETSC_SUCCESS);
624 }
625 
626 /*@C
627   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
628 
629   Collective on dm
630 
631   Input Parameters:
632 + dm          - The DM
633 . numLabels   - The number of labels to draw candidate points from
634 . label       - An array of labels containing candidate points
635 . value       - An array of label values marking the candidate points
636 . numExLabels - The number of labels to use for exclusion
637 . exLabel     - An array of labels indicating points to be excluded, or NULL
638 . exValue     - An array of label values to be excluded, or NULL
639 . rootSection - The number of leaves for a given root point
640 . rootrank    - The rank of each edge into the root point
641 . leafSection - The number of processes sharing a given leaf point
642 - leafrank    - The rank of each process sharing a leaf point
643 
644   Output Parameter:
645 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
646 
647   Note:
648   The candidate points are only added to the overlap if they are adjacent to a shared point
649 
650   Level: developer
651 
652 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
653 @*/
654 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
655 {
656   MPI_Comm           comm;
657   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
658   PetscSF            sfPoint;
659   const PetscSFNode *remote;
660   const PetscInt    *local;
661   const PetscInt    *nrank, *rrank;
662   PetscInt          *adj = NULL;
663   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
664   PetscMPIInt        rank, size;
665   PetscBool          flg;
666 
667   PetscFunctionBegin;
668   *ovLabel = NULL;
669   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
670   PetscCallMPI(MPI_Comm_size(comm, &size));
671   PetscCallMPI(MPI_Comm_rank(comm, &rank));
672   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
673   PetscCall(DMGetPointSF(dm, &sfPoint));
674   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
675   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
676   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
677   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
678   PetscCall(ISGetIndices(rootrank, &rrank));
679   PetscCall(ISGetIndices(leafrank, &nrank));
680   for (l = 0; l < numLabels; ++l) {
681     IS              valIS;
682     const PetscInt *points;
683     PetscInt        n;
684 
685     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
686     if (!valIS) continue;
687     PetscCall(ISGetIndices(valIS, &points));
688     PetscCall(ISGetLocalSize(valIS, &n));
689     for (PetscInt i = 0; i < n; ++i) {
690       const PetscInt p = points[i];
691 
692       if ((p >= sStart) && (p < sEnd)) {
693         PetscInt loc, adjSize = PETSC_DETERMINE;
694 
695         /* Handle leaves: shared with the root point */
696         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
697         else loc = (p >= 0 && p < nleaves) ? p : -1;
698         if (loc >= 0) {
699           const PetscInt remoteRank = remote[loc].rank;
700 
701           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
702           for (PetscInt a = 0; a < adjSize; ++a) {
703             PetscBool insert = PETSC_TRUE;
704 
705             for (el = 0; el < numExLabels; ++el) {
706               PetscInt exVal;
707               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
708               if (exVal == exValue[el]) {
709                 insert = PETSC_FALSE;
710                 break;
711               }
712             }
713             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
714           }
715         }
716         /* Some leaves share a root with other leaves on different processes */
717         PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
718       }
719       /* Roots are shared with leaves */
720       PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
721     }
722     PetscCall(ISRestoreIndices(valIS, &points));
723     PetscCall(ISDestroy(&valIS));
724   }
725   PetscCall(PetscFree(adj));
726   PetscCall(ISRestoreIndices(rootrank, &rrank));
727   PetscCall(ISRestoreIndices(leafrank, &nrank));
728   /* We require the closure in the overlap */
729   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
730   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
731   if (flg) {
732     PetscViewer viewer;
733     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
734     PetscCall(DMLabelView(ovAdjByRank, viewer));
735   }
736   /* Invert sender to receiver label */
737   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
738   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
739   /* Add owned points, except for shared local points */
740   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
741   for (l = 0; l < nleaves; ++l) {
742     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
743     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
744   }
745   /* Clean up */
746   PetscCall(DMLabelDestroy(&ovAdjByRank));
747   PetscFunctionReturn(PETSC_SUCCESS);
748 }
749 
750 /*@C
751   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
752 
753   Collective on dm
754 
755   Input Parameters:
756 + dm          - The DM
757 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
758 
759   Output Parameter:
760 . migrationSF - An SF that maps original points in old locations to points in new locations
761 
762   Level: developer
763 
764 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
765 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  /* Tag each local point with its depth stratum, then pull the stratum of every incoming
     overlap point over the overlap SF (leaves without data keep the -1 sentinel) */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift */
  /* The shifts place received points in the order cells (depth dim), vertices (0), faces, edges,
     matching the stratified numbering convention used by DMPlexStratifyMigrationSF.
     NOTE(review): the final accumulation loop adds only depthRecv[d+1] per stratum, which
     looks correct for dim <= 3 — confirm if this is ever used with higher-depth DAGs. */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: every existing local point plus
     the incoming overlap points, with leaves shifted to the new stratified numbering */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points: points already shared through the
     point SF keep pointing at their true owner rather than at this rank's copy */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended per stratum at the running depthIdx cursor */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* The SF takes ownership of ilocal and iremote, so they are not freed here */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
859 
860 /*@
861   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
862 
863   Input Parameters:
864 + dm          - The DM
865 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
866 
867   Output Parameter:
868 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
869 
870   Note:
871   This lexicographically sorts by (depth, cellType)
872 
873   Level: developer
874 
875 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
876 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* A rank with an empty local mesh reports ldepth < 0; any rank with points must agree with the global depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets */
  /* Pack (depth, cellType) per point into an SFNode pair — .index carries the depth,
     .rank is abused to carry the DMPolytopeType — and pull it to the leaves */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  /* Points with a valid cell type are bucketed by type; the rest are bucketed by depth alone */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    /* Lay out the buckets: for each stratum (in cells/vertices/faces/edges order) first all
       polytope types of the matching dimension, then the depth-only bucket for that stratum */
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i]; /* intentionally shadows the outer dim for this stratum */

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Types whose dimension falls outside [0, dim] (and are not ghost cells) go at the end */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;

    if ((PetscInt)ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  /* ilocal ownership passes to the SF; iremote belongs to the input sf and so is copied */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
984 
985 /*@
986   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
987 
988   Collective on dm
989 
990   Input Parameters:
991 + dm - The DMPlex object
992 . pointSF - The PetscSF describing the communication pattern
993 . originalSection - The PetscSection for existing data layout
994 - originalVec - The existing data in a local vector
995 
996   Output Parameters:
+ newSection - The PetscSection describing the new data layout
998 - newVec - The new data in a local vector
999 
1000   Level: developer
1001 
1002 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
1003 @*/
1004 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
1005 {
1006   PetscSF      fieldSF;
1007   PetscInt    *remoteOffsets, fieldSize;
1008   PetscScalar *originalValues, *newValues;
1009 
1010   PetscFunctionBegin;
1011   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1012   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1013 
1014   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1015   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1016   PetscCall(VecSetType(newVec, dm->vectype));
1017 
1018   PetscCall(VecGetArray(originalVec, &originalValues));
1019   PetscCall(VecGetArray(newVec, &newValues));
1020   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1021   PetscCall(PetscFree(remoteOffsets));
1022   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1023   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1024   PetscCall(PetscSFDestroy(&fieldSF));
1025   PetscCall(VecRestoreArray(newVec, &newValues));
1026   PetscCall(VecRestoreArray(originalVec, &originalValues));
1027   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1028   PetscFunctionReturn(PETSC_SUCCESS);
1029 }
1030 
1031 /*@
1032   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1033 
1034   Collective on dm
1035 
1036   Input Parameters:
1037 + dm - The DMPlex object
1038 . pointSF - The PetscSF describing the communication pattern
1039 . originalSection - The PetscSection for existing data layout
1040 - originalIS - The existing data
1041 
1042   Output Parameters:
+ newSection - The PetscSection describing the new data layout
1044 - newIS - The new data
1045 
1046   Level: developer
1047 
1048 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1049 @*/
1050 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1051 {
1052   PetscSF         fieldSF;
1053   PetscInt       *newValues, *remoteOffsets, fieldSize;
1054   const PetscInt *originalValues;
1055 
1056   PetscFunctionBegin;
1057   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1058   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1059 
1060   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1061   PetscCall(PetscMalloc1(fieldSize, &newValues));
1062 
1063   PetscCall(ISGetIndices(originalIS, &originalValues));
1064   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1065   PetscCall(PetscFree(remoteOffsets));
1066   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1067   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1068   PetscCall(PetscSFDestroy(&fieldSF));
1069   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1070   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1071   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1072   PetscFunctionReturn(PETSC_SUCCESS);
1073 }
1074 
1075 /*@
1076   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1077 
1078   Collective on dm
1079 
1080   Input Parameters:
1081 + dm - The DMPlex object
1082 . pointSF - The PetscSF describing the communication pattern
1083 . originalSection - The PetscSection for existing data layout
1084 . datatype - The type of data
1085 - originalData - The existing data
1086 
1087   Output Parameters:
1088 + newSection - The PetscSection describing the new data layout
1089 - newData - The new data
1090 
1091   Level: developer
1092 
1093 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1094 @*/
1095 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1096 {
1097   PetscSF     fieldSF;
1098   PetscInt   *remoteOffsets, fieldSize;
1099   PetscMPIInt dataSize;
1100 
1101   PetscFunctionBegin;
1102   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1103   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1104 
1105   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1106   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1107   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1108 
1109   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1110   PetscCall(PetscFree(remoteOffsets));
1111   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1112   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1113   PetscCall(PetscSFDestroy(&fieldSF));
1114   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1115   PetscFunctionReturn(PETSC_SUCCESS);
1116 }
1117 
/* Distribute the cone section, cones (renumbered into the parallel numbering), and cone
   orientations from dm to dmParallel along migrationSF, then symmetrize and stratify the
   parallel mesh and copy over the adjacency settings. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate cone points to the global numbering before sending */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* Local numbering is already global (sequential source mesh) */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points into the new local numbering; unmapped points become negative (masked) */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* Any negative entry means a cone point was not present in the overlap — a broken migration */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations ride the same dof-level SF; they need no renumbering */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  {
    /* Propagate adjacency configuration to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1200 
1201 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1202 {
1203   MPI_Comm         comm;
1204   DM               cdm, cdmParallel;
1205   PetscSection     originalCoordSection, newCoordSection;
1206   Vec              originalCoordinates, newCoordinates;
1207   PetscInt         bs;
1208   const char      *name;
1209   const PetscReal *maxCell, *Lstart, *L;
1210 
1211   PetscFunctionBegin;
1212   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1213   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1214 
1215   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1216   PetscCall(DMGetCoordinateDM(dm, &cdm));
1217   PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1218   PetscCall(DMCopyDisc(cdm, cdmParallel));
1219   PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1220   PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1221   PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1222   if (originalCoordinates) {
1223     PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1224     PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1225     PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1226 
1227     PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1228     PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1229     PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1230     PetscCall(VecSetBlockSize(newCoordinates, bs));
1231     PetscCall(VecDestroy(&newCoordinates));
1232   }
1233 
1234   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1235   PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1236   PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1237   PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1238   if (cdm) {
1239     PetscCall(DMClone(dmParallel, &cdmParallel));
1240     PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1241     PetscCall(DMCopyDisc(cdm, cdmParallel));
1242     PetscCall(DMDestroy(&cdmParallel));
1243     PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1244     PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1245     PetscCall(PetscSectionCreate(comm, &newCoordSection));
1246     if (originalCoordinates) {
1247       PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1248       PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1249       PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1250 
1251       PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1252       PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1253       PetscCall(VecSetBlockSize(newCoordinates, bs));
1254       PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1255       PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1256       PetscCall(VecDestroy(&newCoordinates));
1257     }
1258     PetscCall(PetscSectionDestroy(&newCoordSection));
1259   }
1260   PetscFunctionReturn(PETSC_SUCCESS);
1261 }
1262 
/* Distribute every DMLabel of dm to dmParallel along migrationSF. The "depth" label is
   normally skipped (it is recreated by stratification) unless a user has modified it,
   in which case it is communicated like the others. All ranks must agree on the label
   count and ordering; the Bcast/Allreduce calls below are collective. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  /* A state mismatch against the cached depthState means the label was modified after creation */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Drop the auto-generated depth label on the parallel mesh so the communicated one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    } else {
      isDepth = PETSC_FALSE;
    }
    /* Rank 0 decides whether this label index is "depth" so all ranks branch identically */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      /* Empty local meshes report depth < 0; nonempty ones must match the global depth */
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1343 
/* Distribute the reference tree and the parent/childID tree data from dm to dmParallel
   along migrationSF, renumbering parent points into the parallel local numbering
   (mirrors the cone handling in DMPlexDistributeCones). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    /* Distribute the parent section and build a dof-level SF over it */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      /* Translate parent points to the global numbering before sending */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      /* Local numbering is already global (sequential source mesh) */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    /* Child IDs need no renumbering; they index into the reference tree */
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received global parents into the new local numbering; unmapped points become negative (masked) */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* Any negative entry means a parent point was not present in the overlap — a broken migration */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1419 
/* Build the shared-point SF of dmParallel from the migration SF by resolving point
   ownership with a MAXLOC reduction (highest bidding rank wins), then copy the
   adjacency configuration of dm onto dmParallel.
   NOTE(review): marked PETSC_UNUSED; DMPlexCreatePointSF() below is the active,
   vote-based variant of this ownership determination. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots, &rowners, numLeaves, &lowners));
    /* Mark every root "unclaimed" (-1) and push that state out to the leaves */
    for (p = 0; p < numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p = 0; p < numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve ownership: the highest bidding rank wins each shared point, then the
       winners are broadcast back down to all leaves */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Ghost points are exactly the leaves owned by another rank */
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners, lowners));
    /* ghostPoints/remotePoints ownership is transferred to the SF (PETSC_OWN_POINTER) */
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  {
    /* Propagate the adjacency configuration so later preallocation on dmParallel matches dm */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1495 
1496 /*@
1497   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1498 
1499   Input Parameters:
1500 + dm - The DMPlex object
1501 - flg - Balance the partition?
1502 
1503   Level: intermediate
1504 
1505 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1506 @*/
1507 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1508 {
1509   DM_Plex *mesh = (DM_Plex *)dm->data;
1510 
1511   PetscFunctionBegin;
1512   mesh->partitionBalance = flg;
1513   PetscFunctionReturn(PETSC_SUCCESS);
1514 }
1515 
1516 /*@
1517   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1518 
1519   Input Parameter:
1520 . dm - The DMPlex object
1521 
1522   Output Parameter:
1523 . flg - Balance the partition?
1524 
1525   Level: intermediate
1526 
1527 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1528 @*/
1529 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1530 {
1531   DM_Plex *mesh = (DM_Plex *)dm->data;
1532 
1533   PetscFunctionBegin;
1534   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1535   PetscValidBoolPointer(flg, 2);
1536   *flg = mesh->partitionBalance;
1537   PetscFunctionReturn(PETSC_SUCCESS);
1538 }
1539 
1540 typedef struct {
1541   PetscInt vote, rank, index;
1542 } Petsc3Int;
1543 
1544 /* MaxLoc, but carry a third piece of information around */
1545 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1546 {
1547   Petsc3Int *a = (Petsc3Int *)inout_;
1548   Petsc3Int *b = (Petsc3Int *)in_;
1549   PetscInt   i, len = *len_;
1550   for (i = 0; i < len; i++) {
1551     if (a[i].vote < b[i].vote) {
1552       a[i].vote  = b[i].vote;
1553       a[i].rank  = b[i].rank;
1554       a[i].index = b[i].index;
1555     } else if (a[i].vote <= b[i].vote) {
1556       if (a[i].rank >= b[i].rank) {
1557         a[i].rank  = b[i].rank;
1558         a[i].index = b[i].index;
1559       }
1560     }
1561   }
1562 }
1563 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Notes:
  Output pointSF is guaranteed to have the array of local indices (leaves) sorted.

  Level: developer

.seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;
  const PetscInt     numShifts  = 13759; /* size of the random-shift table used when balancing */
  const PetscScalar *shift      = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
                                          (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
      leafVote[p].rank  = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce with the custom MaxLocCarry op: the largest (possibly shifted) vote
       wins, carrying its rank and leaf index to the root */
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank  = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: every root keeps its current owner; record the new local id of owned points */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank  = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
    }
  }
  /* Broadcast the resolved owner of each root back to all leaves */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));

  /* The point SF leaves are the migrated points owned by another rank */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx]  = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  /* pointLocal/pointRemote ownership is transferred to the SF (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1694 
1695 /*@C
1696   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1697 
1698   Collective on dm
1699 
1700   Input Parameters:
1701 + dm       - The source DMPlex object
1702 - sf       - The star forest communication context describing the migration pattern
1703 
1704   Output Parameter:
1705 . targetDM - The target DMPlex object
1706 
1707   Level: intermediate
1708 
1709 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1710 @*/
1711 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1712 {
1713   MPI_Comm               comm;
1714   PetscInt               dim, cdim, nroots;
1715   PetscSF                sfPoint;
1716   ISLocalToGlobalMapping ltogMigration;
1717   ISLocalToGlobalMapping ltogOriginal = NULL;
1718 
1719   PetscFunctionBegin;
1720   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1721   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1722   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1723   PetscCall(DMGetDimension(dm, &dim));
1724   PetscCall(DMSetDimension(targetDM, dim));
1725   PetscCall(DMGetCoordinateDim(dm, &cdim));
1726   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1727 
1728   /* Check for a one-to-all distribution pattern */
1729   PetscCall(DMGetPointSF(dm, &sfPoint));
1730   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1731   if (nroots >= 0) {
1732     IS        isOriginal;
1733     PetscInt  n, size, nleaves;
1734     PetscInt *numbering_orig, *numbering_new;
1735 
1736     /* Get the original point numbering */
1737     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1738     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1739     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1740     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1741     /* Convert to positive global numbers */
1742     for (n = 0; n < size; n++) {
1743       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1744     }
1745     /* Derive the new local-to-global mapping from the old one */
1746     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1747     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1748     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1749     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1750     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1751     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1752     PetscCall(ISDestroy(&isOriginal));
1753   } else {
1754     /* One-to-all distribution pattern: We can derive LToG from SF */
1755     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1756   }
1757   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1758   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1759   /* Migrate DM data to target DM */
1760   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1761   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1762   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1763   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1764   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1765   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1766   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1767   PetscFunctionReturn(PETSC_SUCCESS);
1768 }
1769 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfMigration, sfStratified, sfPoint;
  PetscBool        flg, balance;
  PetscMPIInt      rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmParallel, 4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to distribute on a single rank: outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
  {
    /* Convert partition to DMLabel */
    IS              is;
    PetscHSetI      ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: collect the target ranks that receive a nonempty set of cells */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure() */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));

  /* Invert the partition label (points-per-rank -> points-to-receive) and turn it
     into a migration SF, stratified so that depth strata stay contiguous */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    if (oldLeaves) PetscCall(PetscFree(oldRemote)); /* frees permRemote (oldRemote was rebound above) */
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
    PetscCall(DMGetLocalSection(dm, &section));

    /* First DM with useNatural = PETSC_TRUE is considered natural */
    /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
    /* Compose with a previous sfNatural if present */
    if (dm->sfNatural) {
      PetscSF natSF;

      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
      PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
      PetscCall(PetscSFDestroy(&natSF));
    } else {
      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    }
    /* Compose with a previous sfMigration if present */
    if (dm->sfMigration) {
      PetscSF naturalPointSF;

      PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
      PetscCall(PetscSFDestroy(&sfMigration));
      sfMigration = naturalPointSF;
    }
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: hand the migration SF to the caller if requested, otherwise destroy it */
  if (sf) {
    *sf = sfMigration;
  } else PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1972 
1973 /*@C
1974   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1975 
1976   Collective on dm
1977 
1978   Input Parameters:
1979 + dm  - The non-overlapping distributed DMPlex object
1980 - overlap - The overlap of partitions (the same on all ranks)
1981 
1982   Output Parameters:
1983 + sf - The PetscSF used for point distribution
1984 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1985 
1986   Notes:
1987   If the mesh was not distributed, the return value is NULL.
1988 
1989   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1990   representation on the mesh.
1991 
1992   Options Database Keys:
1993 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1994 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1995 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1996 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1997 
1998   Level: advanced
1999 
2000 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
2001 @*/
2002 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
2003 {
2004   DM_Plex     *mesh = (DM_Plex *)dm->data;
2005   MPI_Comm     comm;
2006   PetscMPIInt  size, rank;
2007   PetscSection rootSection, leafSection;
2008   IS           rootrank, leafrank;
2009   DM           dmCoord;
2010   DMLabel      lblOverlap;
2011   PetscSF      sfOverlap, sfStratified, sfPoint;
2012 
2013   PetscFunctionBegin;
2014   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2015   PetscValidLogicalCollectiveInt(dm, overlap, 2);
2016   if (sf) PetscValidPointer(sf, 3);
2017   PetscValidPointer(dmOverlap, 4);
2018 
2019   if (sf) *sf = NULL;
2020   *dmOverlap = NULL;
2021   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2022   PetscCallMPI(MPI_Comm_size(comm, &size));
2023   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2024   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2025   {
2026     // We need to get options for the _already_distributed mesh, so it must be done here
2027     PetscInt    overlap;
2028     const char *prefix;
2029     char        oldPrefix[PETSC_MAX_PATH_LEN];
2030 
2031     oldPrefix[0] = '\0';
2032     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
2033     PetscCall(PetscStrcpy(oldPrefix, prefix));
2034     PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
2035     PetscCall(DMPlexGetOverlap(dm, &overlap));
2036     PetscObjectOptionsBegin((PetscObject)dm);
2037     PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
2038     PetscOptionsEnd();
2039     PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
2040   }
2041   PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2042   /* Compute point overlap with neighbouring processes on the distributed DM */
2043   PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
2044   PetscCall(PetscSectionCreate(comm, &rootSection));
2045   PetscCall(PetscSectionCreate(comm, &leafSection));
2046   PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
2047   if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
2048   else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
2049   /* Convert overlap label to stratified migration SF */
2050   PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
2051   PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
2052   PetscCall(PetscSFDestroy(&sfOverlap));
2053   sfOverlap = sfStratified;
2054   PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
2055   PetscCall(PetscSFSetFromOptions(sfOverlap));
2056 
2057   PetscCall(PetscSectionDestroy(&rootSection));
2058   PetscCall(PetscSectionDestroy(&leafSection));
2059   PetscCall(ISDestroy(&rootrank));
2060   PetscCall(ISDestroy(&leafrank));
2061   PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
2062 
2063   /* Build the overlapping DM */
2064   PetscCall(DMPlexCreate(comm, dmOverlap));
2065   PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
2066   PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
2067   /* Store the overlap in the new DM */
2068   PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
2069   /* Build the new point SF */
2070   PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
2071   PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
2072   PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
2073   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2074   PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
2075   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2076   PetscCall(PetscSFDestroy(&sfPoint));
2077   /* Cleanup overlap partition */
2078   PetscCall(DMLabelDestroy(&lblOverlap));
2079   if (sf) *sf = sfOverlap;
2080   else PetscCall(PetscSFDestroy(&sfOverlap));
2081   PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2082   PetscFunctionReturn(PETSC_SUCCESS);
2083 }
2084 
2085 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2086 {
2087   DM_Plex *mesh = (DM_Plex *)dm->data;
2088 
2089   PetscFunctionBegin;
2090   *overlap = mesh->overlap;
2091   PetscFunctionReturn(PETSC_SUCCESS);
2092 }
2093 
2094 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2095 {
2096   DM_Plex *mesh    = NULL;
2097   DM_Plex *meshSrc = NULL;
2098 
2099   PetscFunctionBegin;
2100   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2101   mesh          = (DM_Plex *)dm->data;
2102   mesh->overlap = overlap;
2103   if (dmSrc) {
2104     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2105     meshSrc = (DM_Plex *)dmSrc->data;
2106     mesh->overlap += meshSrc->overlap;
2107   }
2108   PetscFunctionReturn(PETSC_SUCCESS);
2109 }
2110 
2111 /*@
2112   DMPlexGetOverlap - Get the width of the cell overlap
2113 
2114   Not collective
2115 
2116   Input Parameter:
2117 . dm   - The DM
2118 
2119   Output Parameter:
2120 . overlap - the width of the cell overlap
2121 
2122   Level: intermediate
2123 
2124 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2125 @*/
2126 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2127 {
2128   PetscFunctionBegin;
2129   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2130   PetscValidIntPointer(overlap, 2);
2131   PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2132   PetscFunctionReturn(PETSC_SUCCESS);
2133 }
2134 
2135 /*@
2136   DMPlexSetOverlap - Set the width of the cell overlap
2137 
2138   Logically collective
2139 
2140   Input Parameters:
2141 + dm      - The DM
2142 . dmSrc   - The DM that produced this one, or NULL
2143 - overlap - the width of the cell overlap
2144 
2145   Note:
2146   The overlap from dmSrc is added to dm
2147 
2148   Level: intermediate
2149 
2150 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2151 @*/
2152 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2153 {
2154   PetscFunctionBegin;
2155   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2156   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2157   PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2158   PetscFunctionReturn(PETSC_SUCCESS);
2159 }
2160 
2161 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2162 {
2163   DM_Plex *mesh = (DM_Plex *)dm->data;
2164 
2165   PetscFunctionBegin;
2166   mesh->distDefault = dist;
2167   PetscFunctionReturn(PETSC_SUCCESS);
2168 }
2169 
2170 /*@
2171   DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default
2172 
2173   Logically collective
2174 
2175   Input Parameters:
2176 + dm   - The DM
2177 - dist - Flag for distribution
2178 
2179   Level: intermediate
2180 
2181 .seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2182 @*/
2183 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2184 {
2185   PetscFunctionBegin;
2186   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2187   PetscValidLogicalCollectiveBool(dm, dist, 2);
2188   PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2189   PetscFunctionReturn(PETSC_SUCCESS);
2190 }
2191 
2192 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2193 {
2194   DM_Plex *mesh = (DM_Plex *)dm->data;
2195 
2196   PetscFunctionBegin;
2197   *dist = mesh->distDefault;
2198   PetscFunctionReturn(PETSC_SUCCESS);
2199 }
2200 
2201 /*@
2202   DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default
2203 
2204   Not collective
2205 
2206   Input Parameter:
2207 . dm   - The DM
2208 
2209   Output Parameter:
2210 . dist - Flag for distribution
2211 
2212   Level: intermediate
2213 
2214 .seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2215 @*/
2216 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2217 {
2218   PetscFunctionBegin;
2219   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2220   PetscValidBoolPointer(dist, 2);
2221   PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2222   PetscFunctionReturn(PETSC_SUCCESS);
2223 }
2224 
2225 /*@C
2226   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2227   root process of the original's communicator.
2228 
2229   Collective on dm
2230 
2231   Input Parameter:
2232 . dm - the original DMPlex object
2233 
2234   Output Parameters:
2235 + sf - the PetscSF used for point distribution (optional)
2236 - gatherMesh - the gathered DM object, or NULL
2237 
2238   Level: intermediate
2239 
2240 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2241 @*/
2242 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2243 {
2244   MPI_Comm         comm;
2245   PetscMPIInt      size;
2246   PetscPartitioner oldPart, gatherPart;
2247 
2248   PetscFunctionBegin;
2249   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2250   PetscValidPointer(gatherMesh, 3);
2251   *gatherMesh = NULL;
2252   if (sf) *sf = NULL;
2253   comm = PetscObjectComm((PetscObject)dm);
2254   PetscCallMPI(MPI_Comm_size(comm, &size));
2255   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2256   PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2257   PetscCall(PetscObjectReference((PetscObject)oldPart));
2258   PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2259   PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2260   PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2261   PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2262 
2263   PetscCall(DMPlexSetPartitioner(dm, oldPart));
2264   PetscCall(PetscPartitionerDestroy(&gatherPart));
2265   PetscCall(PetscPartitionerDestroy(&oldPart));
2266   PetscFunctionReturn(PETSC_SUCCESS);
2267 }
2268 
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscInt     pStart, pEnd, p;
  PetscInt     numPoints = -1;
  PetscSF      migrationSF, sfPoint, gatherSF;
  DM           gatherDM, dmCoord;
  PetscSFNode *points;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(redundantMesh, 3);
  *redundantMesh = NULL;
  comm           = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) {
    /* Serial: the mesh is already redundant; hand back a new reference to dm itself */
    PetscCall(PetscObjectReference((PetscObject)dm));
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Step 1: gather the whole mesh onto rank 0 */
  PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
  if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
  numPoints = pEnd - pStart;
  /* Only rank 0 holds the gathered mesh; broadcast its point count to everyone */
  PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
  PetscCall(PetscMalloc1(numPoints, &points));
  PetscCall(PetscSFCreate(comm, &migrationSF));
  /* Step 2: build a migration SF in which every rank receives every point from rank 0 */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  /* nroots is pEnd - pStart (nonzero only on rank 0); all numPoints leaves point at rank 0 */
  PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
  /* Step 3: migrate the gathered mesh to all ranks */
  PetscCall(DMPlexCreate(comm, redundantMesh));
  PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
  PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
  /* This is to express that all point are in overlap */
  PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
  PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
  PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  if (sf) {
    PetscSF tsf;

    /* The overall distribution SF is gather (dm -> rank 0) composed with broadcast (rank 0 -> all) */
    PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
    PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
    PetscCall(PetscSFDestroy(&tsf));
  }
  PetscCall(PetscSFDestroy(&migrationSF));
  PetscCall(PetscSFDestroy(&gatherSF));
  PetscCall(DMDestroy(&gatherDM));
  /* Carry the discretization and internal settings over from the original mesh */
  PetscCall(DMCopyDisc(dm, *redundantMesh));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2344 
2345 /*@
2346   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2347 
2348   Collective
2349 
2350   Input Parameter:
2351 . dm      - The DM object
2352 
2353   Output Parameter:
2354 . distributed - Flag whether the DM is distributed
2355 
2356   Level: intermediate
2357 
2358   Notes:
2359   This currently finds out whether at least two ranks have any DAG points.
2360   This involves MPI_Allreduce() with one integer.
2361   The result is currently not stashed so every call to this routine involves this global communication.
2362 
2363 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2364 @*/
2365 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2366 {
2367   PetscInt    pStart, pEnd, count;
2368   MPI_Comm    comm;
2369   PetscMPIInt size;
2370 
2371   PetscFunctionBegin;
2372   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2373   PetscValidBoolPointer(distributed, 2);
2374   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2375   PetscCallMPI(MPI_Comm_size(comm, &size));
2376   if (size == 1) {
2377     *distributed = PETSC_FALSE;
2378     PetscFunctionReturn(PETSC_SUCCESS);
2379   }
2380   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2381   count = (pEnd - pStart) > 0 ? 1 : 0;
2382   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2383   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2384   PetscFunctionReturn(PETSC_SUCCESS);
2385 }
2386 
2387 /*@C
2388   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2389 
2390   Input Parameters:
2391 + dm   - The DM
2392 - name - The name of the specific parallel distribution
2393 
2394   Note:
2395   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2396   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2397   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2398   loads the parallel distribution stored in file under this name.
2399 
2400   Level: developer
2401 
2402 .seealso: `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2403 @*/
2404 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2405 {
2406   DM_Plex *mesh = (DM_Plex *)dm->data;
2407 
2408   PetscFunctionBegin;
2409   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2410   if (name) PetscValidCharPointer(name, 2);
2411   PetscCall(PetscFree(mesh->distributionName));
2412   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2413   PetscFunctionReturn(PETSC_SUCCESS);
2414 }
2415 
2416 /*@C
2417   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2418 
2419   Input Parameter:
2420 . dm - The DM
2421 
2422   Output Parameter:
2423 . name - The name of the specific parallel distribution
2424 
2425   Note:
2426   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2427   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2428   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2429   loads the parallel distribution stored in file under this name.
2430 
2431   Level: developer
2432 
2433 .seealso: `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2434 @*/
2435 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2436 {
2437   DM_Plex *mesh = (DM_Plex *)dm->data;
2438 
2439   PetscFunctionBegin;
2440   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2441   PetscValidPointer(name, 2);
2442   *name = mesh->distributionName;
2443   PetscFunctionReturn(PETSC_SUCCESS);
2444 }
2445