xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision f3fa974cd8ac3307c23c9d7e81b13b5f402f9e6e)
1 #include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency    = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *)dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *)dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone   = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 
107   PetscFunctionBeginHot;
108   PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
109   PetscCall(DMPlexGetCone(dm, p, &cone));
110   for (c = 0; c <= coneSize; ++c) {
111     const PetscInt  point   = !c ? p : cone[c - 1];
112     const PetscInt *support = NULL;
113     PetscInt        supportSize, s, q;
114 
115     PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
116     PetscCall(DMPlexGetSupport(dm, point, &support));
117     for (s = 0; s < supportSize; ++s) {
118       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
119         if (support[s] == adj[q]) break;
120       }
121       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
122     }
123   }
124   *adjSize = numAdj;
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
129 {
130   const PetscInt *support = NULL;
131   PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
132 
133   PetscFunctionBeginHot;
134   PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
135   PetscCall(DMPlexGetSupport(dm, p, &support));
136   for (s = 0; s <= supportSize; ++s) {
137     const PetscInt  point = !s ? p : support[s - 1];
138     const PetscInt *cone  = NULL;
139     PetscInt        coneSize, c, q;
140 
141     PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
142     PetscCall(DMPlexGetCone(dm, point, &cone));
143     for (c = 0; c < coneSize; ++c) {
144       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
145         if (cone[c] == adj[q]) break;
146       }
147       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
148     }
149   }
150   *adjSize = numAdj;
151   PetscFunctionReturn(0);
152 }
153 
154 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
155 {
156   PetscInt *star   = NULL;
157   PetscInt  numAdj = 0, maxAdjSize = *adjSize, starSize, s;
158 
159   PetscFunctionBeginHot;
160   PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
161   for (s = 0; s < starSize * 2; s += 2) {
162     const PetscInt *closure = NULL;
163     PetscInt        closureSize, c, q;
164 
165     PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
166     for (c = 0; c < closureSize * 2; c += 2) {
167       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
168         if (closure[c] == adj[q]) break;
169       }
170       PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
171     }
172     PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
173   }
174   PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
175   *adjSize = numAdj;
176   PetscFunctionReturn(0);
177 }
178 
/*
  DMPlexGetAdjacency_Internal - Compute the points adjacent to p, dispatching on the
  adjacency definition (user callback, transitive closure, cone-based, or support-based),
  and optionally replacing constrained points by their anchor points.

  On input, *adj may be NULL (an array is allocated here; the caller must PetscFree it)
  and *adjSize may be negative (meaning "use the capacity computed at allocation time").

  NOTE(review): asiz is function-static, so the capacity estimate persists across calls;
  if a caller passes a preallocated *adj together with *adjSize < 0 before any allocating
  call has happened, asiz is still 0 — presumably callers never do that; confirm.
*/
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz       = 0;
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS  = NULL;
  const PetscInt *anchors;
  DM_Plex        *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* Anchor data: aSec maps constrained points to their anchors listed in aIS */
    PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
      maxAnchors = PetscMax(1, maxAnchors);
      PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
      PetscCall(ISGetIndices(aIS, &anchors));
    }
  }
  if (!*adj) {
    /* Capacity estimate: geometric series over maximum cone/support fan-out through all
       depth levels, scaled by the anchor multiplicity and capped by the chart size */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC, depth + 1) - 1) / (maxC - 1)) : depth + 1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS, depth + 1) - 1) / (maxS - 1)) : depth + 1;
    asiz          = PetscMax(PetscPowInt(maxS, depth) * coneSeries, PetscPowInt(maxC, depth) * supportSeries);
    asiz *= maxAnchors;
    asiz = PetscMin(asiz, pEnd - pStart);
    PetscCall(PetscMalloc1(asiz, adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: a user-set callback overrides all built-in adjacency definitions */
  if (mesh->useradjacency) {
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency list by its anchor points,
       deduplicating against the points already present */
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i        = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i]; /* shadows the function parameter intentionally */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left one slot */
        for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
        for (s = 0; s < aDof; ++s) {
          /* If the scan completes without a match, the loop condition itself appends
             the anchor (comma-expression side effect) and terminates the loop */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
            if (anchors[aOff + s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      } else {
        i++; /* point kept; advance past it */
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS, &anchors));
  }
  PetscFunctionReturn(0);
}
259 
260 /*@
261   DMPlexGetAdjacency - Return all points adjacent to the given point
262 
263   Input Parameters:
264 + dm - The DM object
265 - p  - The point
266 
267   Input/Output Parameters:
268 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
269             on output the number of adjacent points
270 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
271         on output contains the adjacent points
272 
273   Level: advanced
274 
275   Notes:
276     The user must PetscFree the adj array if it was not passed in.
277 
278 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
279 @*/
280 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
281 {
282   PetscBool useCone, useClosure, useAnchors;
283 
284   PetscFunctionBeginHot;
285   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
286   PetscValidIntPointer(adjSize, 3);
287   PetscValidPointer(adj, 4);
288   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
289   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
290   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
291   PetscFunctionReturn(0);
292 }
293 
294 /*@
295   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
296 
297   Collective on dm
298 
299   Input Parameters:
300 + dm      - The DM
301 . sfPoint - The PetscSF which encodes point connectivity
302 . rootRankSection -
303 . rootRanks -
304 . leftRankSection -
305 - leafRanks -
306 
307   Output Parameters:
308 + processRanks - A list of process neighbors, or NULL
309 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
310 
311   Level: developer
312 
313 .seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
314 @*/
315 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
316 {
317   const PetscSFNode *remotePoints;
318   PetscInt          *localPointsNew;
319   PetscSFNode       *remotePointsNew;
320   const PetscInt    *nranks;
321   PetscInt          *ranksNew;
322   PetscBT            neighbors;
323   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
324   PetscMPIInt        size, proc, rank;
325 
326   PetscFunctionBegin;
327   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
328   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
329   if (processRanks) PetscValidPointer(processRanks, 7);
330   if (sfProcess) PetscValidPointer(sfProcess, 8);
331   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
332   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
333   PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
334   PetscCall(PetscBTCreate(size, &neighbors));
335   PetscCall(PetscBTMemzero(size, neighbors));
336   /* Compute root-to-leaf process connectivity */
337   PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
338   PetscCall(ISGetIndices(rootRanks, &nranks));
339   for (p = pStart; p < pEnd; ++p) {
340     PetscInt ndof, noff, n;
341 
342     PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
343     PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
344     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
345   }
346   PetscCall(ISRestoreIndices(rootRanks, &nranks));
347   /* Compute leaf-to-neighbor process connectivity */
348   PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
349   PetscCall(ISGetIndices(leafRanks, &nranks));
350   for (p = pStart; p < pEnd; ++p) {
351     PetscInt ndof, noff, n;
352 
353     PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
354     PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
355     for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
356   }
357   PetscCall(ISRestoreIndices(leafRanks, &nranks));
358   /* Compute leaf-to-root process connectivity */
359   for (l = 0; l < numLeaves; ++l) PetscBTSet(neighbors, remotePoints[l].rank);
360   /* Calculate edges */
361   PetscBTClear(neighbors, rank);
362   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
363     if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
364   }
365   PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
366   PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
367   PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
368   for (proc = 0, n = 0; proc < size; ++proc) {
369     if (PetscBTLookup(neighbors, proc)) {
370       ranksNew[n]              = proc;
371       localPointsNew[n]        = proc;
372       remotePointsNew[n].index = rank;
373       remotePointsNew[n].rank  = proc;
374       ++n;
375     }
376   }
377   PetscCall(PetscBTDestroy(&neighbors));
378   if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
379   else PetscCall(PetscFree(ranksNew));
380   if (sfProcess) {
381     PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
382     PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
383     PetscCall(PetscSFSetFromOptions(*sfProcess));
384     PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
385   }
386   PetscFunctionReturn(0);
387 }
388 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root */
  PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  /* Begin/End pair: degree computation is split-phase; rootdegree is valid after End */
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
  PetscCall(PetscMalloc1(nedges, &remoterank));
  /* Every leaf contributes this process's rank; roots receive one entry per leaf edge */
  for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* rootrank takes ownership of remoterank */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves */
  PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(0);
}
442 
/* Currently compiled out: would copy the overlap-label configuration (ovLabels/ovValues
   and the exclusion label/value) from dm to ndm, looking labels up by name on ndm.
   Kept for reference; enable by removing the #if 0 guard. */
#if 0
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(0);
}
#endif
467 
468 /*@C
469   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
470 
471   Collective on dm
472 
473   Input Parameters:
474 + dm          - The DM
475 . levels      - Number of overlap levels
476 . rootSection - The number of leaves for a given root point
477 . rootrank    - The rank of each edge into the root point
478 . leafSection - The number of processes sharing a given leaf point
479 - leafrank    - The rank of each process sharing a leaf point
480 
481   Output Parameter:
482 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
483 
484   Level: developer
485 
486 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
487 @*/
488 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
489 {
490   MPI_Comm           comm;
491   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
492   PetscSF            sfPoint;
493   const PetscSFNode *remote;
494   const PetscInt    *local;
495   const PetscInt    *nrank, *rrank;
496   PetscInt          *adj = NULL;
497   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
498   PetscMPIInt        rank, size;
499   PetscBool          flg;
500 
501   PetscFunctionBegin;
502   *ovLabel = NULL;
503   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
504   PetscCallMPI(MPI_Comm_size(comm, &size));
505   PetscCallMPI(MPI_Comm_rank(comm, &rank));
506   if (size == 1) PetscFunctionReturn(0);
507   PetscCall(DMGetPointSF(dm, &sfPoint));
508   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
509   if (!levels) {
510     /* Add owned points */
511     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
512     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
513     PetscFunctionReturn(0);
514   }
515   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
516   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
517   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
518   /* Handle leaves: shared with the root point */
519   for (l = 0; l < nleaves; ++l) {
520     PetscInt adjSize = PETSC_DETERMINE, a;
521 
522     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
523     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
524   }
525   PetscCall(ISGetIndices(rootrank, &rrank));
526   PetscCall(ISGetIndices(leafrank, &nrank));
527   /* Handle roots */
528   for (p = pStart; p < pEnd; ++p) {
529     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
530 
531     if ((p >= sStart) && (p < sEnd)) {
532       /* Some leaves share a root with other leaves on different processes */
533       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
534       if (neighbors) {
535         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
536         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
537         for (n = 0; n < neighbors; ++n) {
538           const PetscInt remoteRank = nrank[noff + n];
539 
540           if (remoteRank == rank) continue;
541           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
542         }
543       }
544     }
545     /* Roots are shared with leaves */
546     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
547     if (!neighbors) continue;
548     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
549     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
550     for (n = 0; n < neighbors; ++n) {
551       const PetscInt remoteRank = rrank[noff + n];
552 
553       if (remoteRank == rank) continue;
554       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
555     }
556   }
557   PetscCall(PetscFree(adj));
558   PetscCall(ISRestoreIndices(rootrank, &rrank));
559   PetscCall(ISRestoreIndices(leafrank, &nrank));
560   /* Add additional overlap levels */
561   for (l = 1; l < levels; l++) {
562     /* Propagate point donations over SF to capture remote connections */
563     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
564     /* Add next level of point donations to the label */
565     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
566   }
567   /* We require the closure in the overlap */
568   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
569   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
570   if (flg) {
571     PetscViewer viewer;
572     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
573     PetscCall(DMLabelView(ovAdjByRank, viewer));
574   }
575   /* Invert sender to receiver label */
576   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
577   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
578   /* Add owned points, except for shared local points */
579   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
580   for (l = 0; l < nleaves; ++l) {
581     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
582     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
583   }
584   /* Clean up */
585   PetscCall(DMLabelDestroy(&ovAdjByRank));
586   PetscFunctionReturn(0);
587 }
588 
/* HandlePoint_Private - For a shared point p, add its adjacent points to ovAdjByRank for
   every remote rank recorded for p in (section, ranks), skipping this process's own rank
   and skipping any adjacent point excluded by one of the (exLabel[el], exValue[el]) pairs. */
static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
{
  PetscInt neighbors, el;

  PetscFunctionBegin;
  PetscCall(PetscSectionGetDof(section, p, &neighbors));
  if (neighbors) {
    PetscInt   *adj     = NULL;
    PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
    PetscMPIInt rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    PetscCall(PetscSectionGetOffset(section, p, &noff));
    /* adj is allocated by DMPlexGetAdjacency (passed in NULL) and freed below */
    PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = ranks[noff + n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {
        PetscBool insert = PETSC_TRUE;

        /* Exclusion: drop the point if any exclusion label matches its value */
        for (el = 0; el < numExLabels; ++el) {
          PetscInt exVal;
          PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
          if (exVal == exValue[el]) {
            insert = PETSC_FALSE;
            break;
          }
        }
        if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
      }
    }
    PetscCall(PetscFree(adj));
  }
  PetscFunctionReturn(0);
}
625 
626 /*@C
627   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
628 
629   Collective on dm
630 
631   Input Parameters:
632 + dm          - The DM
633 . numLabels   - The number of labels to draw candidate points from
634 . label       - An array of labels containing candidate points
635 . value       - An array of label values marking the candidate points
636 . numExLabels - The number of labels to use for exclusion
637 . exLabel     - An array of labels indicating points to be excluded, or NULL
638 . exValue     - An array of label values to be excluded, or NULL
639 . rootSection - The number of leaves for a given root point
640 . rootrank    - The rank of each edge into the root point
641 . leafSection - The number of processes sharing a given leaf point
642 - leafrank    - The rank of each process sharing a leaf point
643 
644   Output Parameter:
645 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
646 
647   Note:
648   The candidate points are only added to the overlap if they are adjacent to a shared point
649 
650   Level: developer
651 
652 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
653 @*/
654 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
655 {
656   MPI_Comm           comm;
657   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
658   PetscSF            sfPoint;
659   const PetscSFNode *remote;
660   const PetscInt    *local;
661   const PetscInt    *nrank, *rrank;
662   PetscInt          *adj = NULL;
663   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
664   PetscMPIInt        rank, size;
665   PetscBool          flg;
666 
667   PetscFunctionBegin;
668   *ovLabel = NULL;
669   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
670   PetscCallMPI(MPI_Comm_size(comm, &size));
671   PetscCallMPI(MPI_Comm_rank(comm, &rank));
672   if (size == 1) PetscFunctionReturn(0);
673   PetscCall(DMGetPointSF(dm, &sfPoint));
674   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
675   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
676   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
677   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
678   PetscCall(ISGetIndices(rootrank, &rrank));
679   PetscCall(ISGetIndices(leafrank, &nrank));
680   for (l = 0; l < numLabels; ++l) {
681     IS              valIS;
682     const PetscInt *points;
683     PetscInt        n;
684 
685     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
686     if (!valIS) continue;
687     PetscCall(ISGetIndices(valIS, &points));
688     PetscCall(ISGetLocalSize(valIS, &n));
689     for (PetscInt i = 0; i < n; ++i) {
690       const PetscInt p = points[i];
691 
692       if ((p >= sStart) && (p < sEnd)) {
693         PetscInt loc, adjSize = PETSC_DETERMINE;
694 
695         /* Handle leaves: shared with the root point */
696         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
697         else loc = (p >= 0 && p < nleaves) ? p : -1;
698         if (loc >= 0) {
699           const PetscInt remoteRank = remote[loc].rank;
700 
701           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
702           for (PetscInt a = 0; a < adjSize; ++a) {
703             PetscBool insert = PETSC_TRUE;
704 
705             for (el = 0; el < numExLabels; ++el) {
706               PetscInt exVal;
707               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
708               if (exVal == exValue[el]) {
709                 insert = PETSC_FALSE;
710                 break;
711               }
712             }
713             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
714           }
715         }
716         /* Some leaves share a root with other leaves on different processes */
717         HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank);
718       }
719       /* Roots are shared with leaves */
720       HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank);
721     }
722     PetscCall(ISRestoreIndices(valIS, &points));
723     PetscCall(ISDestroy(&valIS));
724   }
725   PetscCall(PetscFree(adj));
726   PetscCall(ISRestoreIndices(rootrank, &rrank));
727   PetscCall(ISRestoreIndices(leafrank, &nrank));
728   /* We require the closure in the overlap */
729   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
730   PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
731   if (flg) {
732     PetscViewer viewer;
733     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
734     PetscCall(DMLabelView(ovAdjByRank, viewer));
735   }
736   /* Invert sender to receiver label */
737   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
738   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
739   /* Add owned points, except for shared local points */
740   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
741   for (l = 0; l < nleaves; ++l) {
742     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
743     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
744   }
745   /* Clean up */
746   PetscCall(DMLabelDestroy(&ovAdjByRank));
747   PetscFunctionReturn(0);
748 }
749 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets */
  /* Tag each local root point with its depth, then pull the depth of every incoming overlap leaf */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; p++) remoteDepths[p] = -1; /* sentinel: overwritten by the broadcast below */
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
  for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
  for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* NOTE(review): the accumulation below offsets received strata in the order
     cells (depth dim), vertices (0), then intermediate strata -- apparently the
     standard interleaved Plex numbering; assumes depth == topological dimension
     for each stratum (fully interpolated or uninterpolated mesh) -- TODO confirm */
  depthShift[dim] = 0;
  for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
  for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
  for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d]; /* next free slot for received points of depth d */
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves; /* all current local points plus every received overlap point */
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves, shifted to make room for received points in each stratum */
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; p++) {
      point                = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  /* Shared points keep their existing remote owner from the point SF instead of pointing at themselves */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d = 0; d < dim + 1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = 0; p < numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point                = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  /* Each received point is appended at the current free slot of its depth stratum */
  for (p = 0; p < nleaves; p++) {
    point                = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths, remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* ilocal/iremote ownership passes to the SF (PETSC_OWN_POINTER): do not free them here */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(0);
}
859 
/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.

  Input Parameters:
+ dm          - The DM
- sf          - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Note:
  This lexicographically sorts by (depth, cellType)

  Level: developer

.seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv, *ctShift, *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* A rank that has points (ldepth >= 0) must agree with the global maximum depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));

  /* Before building the migration SF we need to know the new stratum offsets */
  /* Pack (depth, cellType) per root point: PetscSFNode.index carries the depth, .rank carries the cell type */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth + 1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  for (p = 0; p < nleaves; ++p) {
    remoteDepths[p].index = -1;
    remoteDepths[p].rank  = -1;
  }
  /* Pull each incoming leaf's (depth, cellType) pair as one MPIU_2INT */
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths, MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  /* Leaves with a valid cell type are binned by type; the rest fall back to depth-only binning */
  PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
     */
    depths[0] = depth;
    depths[1] = 0;
    depths[2] = depth - 1;
    depths[3] = 1;
    dims[0]   = dim;
    dims[1]   = 0;
    dims[2]   = dim - 1;
    dims[3]   = 1;
    /* Lay out strata in the interleaved order above: for each stratum, first all
       cell types of the matching dimension, then the depth-only bin */
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i];

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift += depthRecv[dep];
    }
    /* Cell types whose dimension lies outside [0, dim] (and are not ghost cells)
       were skipped above; give them offsets at the end */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType)remoteDepths[p].rank;

    if ((PetscInt)ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
  /* ilocal ownership passes to the SF; iremote is copied since it still belongs to the input sf */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths, remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
984 
985 /*@
986   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
987 
988   Collective on dm
989 
990   Input Parameters:
991 + dm - The DMPlex object
992 . pointSF - The PetscSF describing the communication pattern
993 . originalSection - The PetscSection for existing data layout
994 - originalVec - The existing data in a local vector
995 
996   Output Parameters:
997 + newSection - The PetscSF describing the new data layout
998 - newVec - The new data in a local vector
999 
1000   Level: developer
1001 
1002 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
1003 @*/
1004 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
1005 {
1006   PetscSF      fieldSF;
1007   PetscInt    *remoteOffsets, fieldSize;
1008   PetscScalar *originalValues, *newValues;
1009 
1010   PetscFunctionBegin;
1011   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1012   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1013 
1014   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1015   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1016   PetscCall(VecSetType(newVec, dm->vectype));
1017 
1018   PetscCall(VecGetArray(originalVec, &originalValues));
1019   PetscCall(VecGetArray(newVec, &newValues));
1020   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1021   PetscCall(PetscFree(remoteOffsets));
1022   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1023   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1024   PetscCall(PetscSFDestroy(&fieldSF));
1025   PetscCall(VecRestoreArray(newVec, &newValues));
1026   PetscCall(VecRestoreArray(originalVec, &originalValues));
1027   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1028   PetscFunctionReturn(0);
1029 }
1030 
1031 /*@
1032   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1033 
1034   Collective on dm
1035 
1036   Input Parameters:
1037 + dm - The DMPlex object
1038 . pointSF - The PetscSF describing the communication pattern
1039 . originalSection - The PetscSection for existing data layout
1040 - originalIS - The existing data
1041 
1042   Output Parameters:
1043 + newSection - The PetscSF describing the new data layout
1044 - newIS - The new data
1045 
1046   Level: developer
1047 
1048 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1049 @*/
1050 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1051 {
1052   PetscSF         fieldSF;
1053   PetscInt       *newValues, *remoteOffsets, fieldSize;
1054   const PetscInt *originalValues;
1055 
1056   PetscFunctionBegin;
1057   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1058   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1059 
1060   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1061   PetscCall(PetscMalloc1(fieldSize, &newValues));
1062 
1063   PetscCall(ISGetIndices(originalIS, &originalValues));
1064   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1065   PetscCall(PetscFree(remoteOffsets));
1066   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1067   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *)originalValues, newValues, MPI_REPLACE));
1068   PetscCall(PetscSFDestroy(&fieldSF));
1069   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1070   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1071   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 /*@
1076   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1077 
1078   Collective on dm
1079 
1080   Input Parameters:
1081 + dm - The DMPlex object
1082 . pointSF - The PetscSF describing the communication pattern
1083 . originalSection - The PetscSection for existing data layout
1084 . datatype - The type of data
1085 - originalData - The existing data
1086 
1087   Output Parameters:
1088 + newSection - The PetscSection describing the new data layout
1089 - newData - The new data
1090 
1091   Level: developer
1092 
1093 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1094 @*/
1095 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1096 {
1097   PetscSF     fieldSF;
1098   PetscInt   *remoteOffsets, fieldSize;
1099   PetscMPIInt dataSize;
1100 
1101   PetscFunctionBegin;
1102   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1103   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1104 
1105   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1106   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1107   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1108 
1109   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1110   PetscCall(PetscFree(remoteOffsets));
1111   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1112   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData, MPI_REPLACE));
1113   PetscCall(PetscSFDestroy(&fieldSF));
1114   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1115   PetscFunctionReturn(0);
1116 }
1117 
/* Distribute the cone section, cone points, and cone orientations of dm onto
   dmParallel along migrationSF.  Cone points are translated to global numbering
   via 'original' (if given), communicated, then mapped into the new local
   numbering with 'renumbering'.  Finally the parallel mesh is symmetrized
   (supports built) and stratified, and basic adjacency flags are copied over. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  PetscSF      coneSF;
  PetscSection originalConeSection, newConeSection;
  PetscInt    *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel)); /* allocates cone storage for the new section */
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    /* Translate cone entries to global numbering so receivers can interpret them */
    PetscInt numCones;

    PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
    PetscCall(PetscMalloc1(numCones, &globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  /* Map received global cone points into the new local numbering; unmapped points become negative (masked) */
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* Debug-only check: every cone point must have been found in the renumbering */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {
        valid = PETSC_FALSE;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
      }
    }
    PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations use the same SF since they share the cone layout */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  {
    /* Carry the adjacency configuration over to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(0);
}
1200 
/* Distribute vertex coordinates (and, when present, cell coordinates and
   periodicity data) from dm onto dmParallel along migrationSF, reusing
   DMPlexDistributeField() for the actual data movement. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *Lstart, *L;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Copy the coordinate discretization, then redistribute the local coordinate vector */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetCoordinateDM(dm, &cdm));
  PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
  PetscCall(DMCopyDisc(cdm, cdmParallel));
  PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
  PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
  PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
  if (originalCoordinates) {
    /* Coordinates are a local (sequential) vector on each rank */
    PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
    PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
    PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

    PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
    PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
    /* Preserve the block size (spatial dimension) on the new vector */
    PetscCall(VecGetBlockSize(originalCoordinates, &bs));
    PetscCall(VecSetBlockSize(newCoordinates, bs));
    PetscCall(VecDestroy(&newCoordinates));
  }

  /* Propagate periodicity, then handle DG/cellwise coordinates if the mesh has them */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
  PetscCall(DMGetCellCoordinateDM(dm, &cdm));
  if (cdm) {
    PetscCall(DMClone(dmParallel, &cdmParallel));
    PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
    PetscCall(DMCopyDisc(cdm, cdmParallel));
    PetscCall(DMDestroy(&cdmParallel));
    PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
    PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
    PetscCall(PetscSectionCreate(comm, &newCoordSection));
    if (originalCoordinates) {
      PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
      PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
      PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));

      PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
      PetscCall(VecGetBlockSize(originalCoordinates, &bs));
      PetscCall(VecSetBlockSize(newCoordinates, bs));
      PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
      PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
      PetscCall(VecDestroy(&newCoordinates));
    }
    PetscCall(PetscSectionDestroy(&newCoordSection));
  }
  PetscFunctionReturn(0);
}
1262 
/* Distribute every DMLabel of dm onto dmParallel along migrationSF.  The
   "depth" label is normally skipped (it is recreated by stratification) unless
   the user has modified it, in which case it is communicated too.  NOTE: the
   MPI_Bcast/MPIU_Allreduce calls below are collective, so every rank must take
   the same path through this loop. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex *)dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels  = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  /* A state mismatch against the recorded depthState means the label was user-modified */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Remove the auto-generated depth label on the target so the distributed one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput     = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject)label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Rank 0 always has labels if anyone does, so its isDepth decision is authoritative */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1341 
/* Distribute the reference tree and parent/childID data (used for meshes with
   hanging nodes) from dm onto dmParallel along migrationSF.  Parents are
   translated to global numbering via 'original' (if given) and mapped back to
   the new local numbering with 'renumbering', mirroring DMPlexDistributeCones(). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex     *mesh  = (DM_Plex *)dm->data;
  DM_Plex     *pmesh = (DM_Plex *)(dmParallel)->data;
  MPI_Comm     comm;
  DM           refTree;
  PetscSection origParentSection, newParentSection;
  PetscInt    *origParents, *origChildIDs;
  PetscBool    flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm, &refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
  PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    /* Redistribute the parent section, then move parents and child IDs with a dof-level SF */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
    PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
    if (original) {
      /* Translate parent points to global numbering so receivers can interpret them */
      PetscInt numParents;

      PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
      PetscCall(PetscMalloc1(numParents, &globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    } else {
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
    /* Map received global parents into the new local numbering; unmapped points become negative (masked) */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* Debug-only check: every parent must have been found in the renumbering */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {
          valid = PETSC_FALSE;
          PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
        }
      }
      PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents, newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1417 
/* Build the point SF of dmParallel from the migration SF via an ownership vote (MAXLOC on rank),
   then copy the adjacency configuration from dm. Currently unused; superseded by DMPlexCreatePointSF(). */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots, &rowners, numLeaves, &lowners));
    /* Mark every root as unclaimed before pushing ownership to the leaves */
    for (p = 0; p < numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    /* NOTE(review): at this point every lowners[p].rank is -1 (broadcast of the -1 roots),
       so the else branch below appears unreachable — kept as-is; confirm before relying on it */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p = 0; p < numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve ownership: highest bidding rank wins, then broadcast the winner back to all leaves */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners, MPI_REPLACE));
    /* Count the leaves owned by another rank; those become ghost points of the point SF */
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners, lowners));
    /* ghostPoints/remotePoints ownership passes to the SF (PETSC_OWN_POINTER) */
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  {
    /* Propagate the adjacency configuration from the serial to the parallel DM */
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1493 
1494 /*@
1495   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1496 
1497   Input Parameters:
1498 + dm - The DMPlex object
1499 - flg - Balance the partition?
1500 
1501   Level: intermediate
1502 
1503 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1504 @*/
1505 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1506 {
1507   DM_Plex *mesh = (DM_Plex *)dm->data;
1508 
1509   PetscFunctionBegin;
1510   mesh->partitionBalance = flg;
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 /*@
1515   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1516 
1517   Input Parameter:
1518 . dm - The DMPlex object
1519 
1520   Output Parameter:
1521 . flg - Balance the partition?
1522 
1523   Level: intermediate
1524 
1525 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1526 @*/
1527 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1528 {
1529   DM_Plex *mesh = (DM_Plex *)dm->data;
1530 
1531   PetscFunctionBegin;
1532   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1533   PetscValidBoolPointer(flg, 2);
1534   *flg = mesh->partitionBalance;
1535   PetscFunctionReturn(0);
1536 }
1537 
1538 typedef struct {
1539   PetscInt vote, rank, index;
1540 } Petsc3Int;
1541 
1542 /* MaxLoc, but carry a third piece of information around */
1543 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1544 {
1545   Petsc3Int *a = (Petsc3Int *)inout_;
1546   Petsc3Int *b = (Petsc3Int *)in_;
1547   PetscInt   i, len = *len_;
1548   for (i = 0; i < len; i++) {
1549     if (a[i].vote < b[i].vote) {
1550       a[i].vote  = b[i].vote;
1551       a[i].rank  = b[i].rank;
1552       a[i].index = b[i].index;
1553     } else if (a[i].vote <= b[i].vote) {
1554       if (a[i].rank >= b[i].rank) {
1555         a[i].rank  = b[i].rank;
1556         a[i].index = b[i].index;
1557       }
1558     }
1559   }
1560 }
1561 
1562 /*@C
1563   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1564 
1565   Input Parameters:
1566 + dm          - The source DMPlex object
1567 . migrationSF - The star forest that describes the parallel point remapping
1568 - ownership   - Flag causing a vote to determine point ownership
1569 
1570   Output Parameter:
1571 . pointSF     - The star forest describing the point overlap in the remapped DM
1572 
1573   Notes:
1574   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1575 
1576   Level: developer
1577 
1578 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1579 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;
  const PetscInt     numShifts  = 13759; /* size of the random-shift table used when balancing */
  const PetscScalar *shift      = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    /* Decide point ownership by a vote: each leaf bids with a (possibly shifted) rank,
       and the MaxLocCarry reduction picks the winner at each root */
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
                                          (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
      leafVote[p].rank  = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    /* Keep only (rank, index) of the winning bid at each root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank  = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: each root is owned by this rank; leaves report their new local id back to the root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank  = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
    }
  }
  /* Tell every leaf who owns its point */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes, MPI_REPLACE));

  /* Leaves owned by another rank become the leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx]  = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  /* shift is non-NULL only when the shifts Vec was created above, so this cleanup is safe */
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  /* pointLocal/pointRemote ownership passes to the SF (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
  PetscFunctionReturn(0);
}
1692 
1693 /*@C
1694   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1695 
1696   Collective on dm
1697 
1698   Input Parameters:
1699 + dm       - The source DMPlex object
1700 - sf       - The star forest communication context describing the migration pattern
1701 
1702   Output Parameter:
1703 . targetDM - The target DMPlex object
1704 
1705   Level: intermediate
1706 
1707 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1708 @*/
1709 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1710 {
1711   MPI_Comm               comm;
1712   PetscInt               dim, cdim, nroots;
1713   PetscSF                sfPoint;
1714   ISLocalToGlobalMapping ltogMigration;
1715   ISLocalToGlobalMapping ltogOriginal = NULL;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1719   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1720   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1721   PetscCall(DMGetDimension(dm, &dim));
1722   PetscCall(DMSetDimension(targetDM, dim));
1723   PetscCall(DMGetCoordinateDim(dm, &cdim));
1724   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1725 
1726   /* Check for a one-to-all distribution pattern */
1727   PetscCall(DMGetPointSF(dm, &sfPoint));
1728   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1729   if (nroots >= 0) {
1730     IS        isOriginal;
1731     PetscInt  n, size, nleaves;
1732     PetscInt *numbering_orig, *numbering_new;
1733 
1734     /* Get the original point numbering */
1735     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1736     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1737     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1738     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1739     /* Convert to positive global numbers */
1740     for (n = 0; n < size; n++) {
1741       if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1742     }
1743     /* Derive the new local-to-global mapping from the old one */
1744     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1745     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1746     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1747     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1748     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1749     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1750     PetscCall(ISDestroy(&isOriginal));
1751   } else {
1752     /* One-to-all distribution pattern: We can derive LToG from SF */
1753     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1754   }
1755   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1756   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1757   /* Migrate DM data to target DM */
1758   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1759   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1760   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1761   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1762   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1763   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1764   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1765   PetscFunctionReturn(0);
1766 }
1767 
1768 /*@C
1769   DMPlexDistribute - Distributes the mesh and any associated sections.
1770 
1771   Collective on dm
1772 
1773   Input Parameters:
1774 + dm  - The original DMPlex object
1775 - overlap - The overlap of partitions, 0 is the default
1776 
1777   Output Parameters:
1778 + sf - The PetscSF used for point distribution, or NULL if not needed
1779 - dmParallel - The distributed DMPlex object
1780 
1781   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1782 
1783   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1784   representation on the mesh.
1785 
1786   Level: intermediate
1787 
1788 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1789 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfMigration, sfStratified, sfPoint;
  PetscBool        flg, balance;
  PetscMPIInt      rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmParallel, 4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to distribute on a single process; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
  {
    /* Convert partition to DMLabel */
    IS              is;
    PetscHSetI      ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt        pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: one stratum per target rank that receives any points */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure(): each rank's stratum is the closure of its cells */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));

  /* Invert the partition label into a migration label, build the migration SF, and
     stratify it so migrated points remain grouped by depth */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote, MPI_REPLACE));
    /* oldRemote aliases permRemote exactly when oldLeaves was non-NULL */
    if (oldLeaves) PetscCall(PetscFree(oldRemote));
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
    PetscCall(DMGetLocalSection(dm, &section));

    /* First DM with useNatural = PETSC_TRUE is considered natural */
    /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
    /* Compose with a previous sfNatural if present */
    if (dm->sfNatural) {
      PetscSF natSF;

      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &natSF));
      PetscCall(PetscSFCompose(dm->sfNatural, natSF, &(*dmParallel)->sfNatural));
      PetscCall(PetscSFDestroy(&natSF));
    } else {
      PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    }
    /* Compose with a previous sfMigration if present */
    if (dm->sfMigration) {
      PetscSF naturalPointSF;

      PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
      PetscCall(PetscSFDestroy(&sfMigration));
      sfMigration = naturalPointSF;
    }
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: the caller takes ownership of sfMigration when sf was requested */
  if (sf) {
    *sf = sfMigration;
  } else PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
1970 
1971 /*@C
1972   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1973 
1974   Collective on dm
1975 
1976   Input Parameters:
1977 + dm  - The non-overlapping distributed DMPlex object
1978 - overlap - The overlap of partitions (the same on all ranks)
1979 
1980   Output Parameters:
1981 + sf - The PetscSF used for point distribution
1982 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1983 
1984   Notes:
1985   If the mesh was not distributed, the return value is NULL.
1986 
1987   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1988   representation on the mesh.
1989 
1990   Options Database Keys:
1991 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
1992 . -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
1993 . -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
1994 - -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap
1995 
1996   Level: advanced
1997 
1998 .seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
1999 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  DM_Plex     *mesh = (DM_Plex *)dm->data;
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DM           dmCoord;
  DMLabel      lblOverlap;
  PetscSF      sfOverlap, sfStratified, sfPoint;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Nothing to overlap on a single process; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);
  {
    // We need to get options for the _already_distributed mesh, so it must be done here
    /* NOTE(review): this `overlap` deliberately shadows the parameter and goes out of scope
       unused afterwards; the options processing matters for its side effects on the DM
       (e.g. mesh->ovLabels below) — confirm the shadowing is intended */
    PetscInt    overlap;
    const char *prefix;
    char        oldPrefix[PETSC_MAX_PATH_LEN];

    oldPrefix[0] = '\0';
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
    PetscCall(PetscStrcpy(oldPrefix, prefix));
    /* Temporarily append "dist_" so the options apply to the distributed mesh only */
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
    PetscCall(DMPlexGetOverlap(dm, &overlap));
    PetscObjectOptionsBegin((PetscObject)dm);
    PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
    PetscOptionsEnd();
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
  }
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  /* Compute point overlap with neighbouring processes on the distributed DM */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  /* Either use user-specified labels to define the overlap, or grow it by `overlap` cell layers */
  if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  /* Convert overlap label to stratified migration SF */
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
  PetscCall(PetscSFDestroy(&sfOverlap));
  sfOverlap = sfStratified;
  PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
  PetscCall(PetscSFSetFromOptions(sfOverlap));

  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));

  /* Build the overlapping DM */
  PetscCall(DMPlexCreate(comm, dmOverlap));
  PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
  /* Store the overlap in the new DM */
  PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
  /* Build the new point SF; no ownership vote is needed since ownership is inherited */
  PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Cleanup overlap partition; the caller takes ownership of sfOverlap when sf was requested */
  PetscCall(DMLabelDestroy(&lblOverlap));
  if (sf) *sf = sfOverlap;
  else PetscCall(PetscSFDestroy(&sfOverlap));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
2082 
2083 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2084 {
2085   DM_Plex *mesh = (DM_Plex *)dm->data;
2086 
2087   PetscFunctionBegin;
2088   *overlap = mesh->overlap;
2089   PetscFunctionReturn(0);
2090 }
2091 
2092 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2093 {
2094   DM_Plex *mesh    = NULL;
2095   DM_Plex *meshSrc = NULL;
2096 
2097   PetscFunctionBegin;
2098   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2099   mesh          = (DM_Plex *)dm->data;
2100   mesh->overlap = overlap;
2101   if (dmSrc) {
2102     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2103     meshSrc = (DM_Plex *)dmSrc->data;
2104     mesh->overlap += meshSrc->overlap;
2105   }
2106   PetscFunctionReturn(0);
2107 }
2108 
/*@
  DMPlexGetOverlap - Get the width of the cell overlap

  Not collective

  Input Parameter:
. dm   - The DM

  Output Parameter:
. overlap - the width of the cell overlap

  Level: intermediate

.seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(overlap, 2);
  /* Dispatch to the implementation composed as "DMPlexGetOverlap_C" (for DMPLEX presumably
     DMPlexGetOverlap_Plex; registration not visible here). PetscUseMethod errors if absent. */
  PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
  PetscFunctionReturn(0);
}
2132 
/*@
  DMPlexSetOverlap - Set the width of the cell overlap

  Logically collective

  Input Parameters:
+ dm      - The DM
. dmSrc   - The DM that produced this one, or NULL
- overlap - the width of the cell overlap

  Note:
  The overlap from dmSrc is added to dm

  Level: intermediate

.seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 3);
  /* dmSrc may be NULL; it is validated by the type-specific implementation.
     PetscTryMethod is a no-op when "DMPlexSetOverlap_C" has not been composed. */
  PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
  PetscFunctionReturn(0);
}
2158 
2159 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2160 {
2161   DM_Plex *mesh = (DM_Plex *)dm->data;
2162 
2163   PetscFunctionBegin;
2164   mesh->distDefault = dist;
2165   PetscFunctionReturn(0);
2166 }
2167 
/*@
  DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default

  Logically collective

  Input Parameters:
+ dm   - The DM
- dist - Flag for distribution

  Level: intermediate

.seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveBool(dm, dist, 2);
  /* PetscTryMethod is a no-op when "DMPlexDistributeSetDefault_C" has not been composed */
  PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
  PetscFunctionReturn(0);
}
2189 
2190 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2191 {
2192   DM_Plex *mesh = (DM_Plex *)dm->data;
2193 
2194   PetscFunctionBegin;
2195   *dist = mesh->distDefault;
2196   PetscFunctionReturn(0);
2197 }
2198 
/*@
  DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default

  Not collective

  Input Parameter:
. dm   - The DM

  Output Parameter:
. dist - Flag for distribution

  Level: intermediate

.seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidBoolPointer(dist, 2);
  /* Dispatch to the implementation composed as "DMPlexDistributeGetDefault_C"; errors if absent */
  PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
  PetscFunctionReturn(0);
}
2222 
/*@C
  DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
  root process of the original's communicator.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- gatherMesh - the gathered DM object, or NULL

  Note:
  On a single-process communicator both gatherMesh and sf are returned as NULL.

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
@*/
PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
{
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscPartitioner oldPart, gatherPart;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(gatherMesh, 3);
  *gatherMesh = NULL;
  if (sf) *sf = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Nothing to gather on a single process */
  if (size == 1) PetscFunctionReturn(0);
  /* Temporarily swap in a "gather" partitioner so that DMPlexDistribute()
     places the entire mesh on the root process */
  PetscCall(DMPlexGetPartitioner(dm, &oldPart));
  PetscCall(PetscObjectReference((PetscObject)oldPart)); /* keep oldPart alive while it is replaced on dm */
  PetscCall(PetscPartitionerCreate(comm, &gatherPart));
  PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
  PetscCall(DMPlexSetPartitioner(dm, gatherPart));
  PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));

  /* Restore the original partitioner and drop our extra references */
  PetscCall(DMPlexSetPartitioner(dm, oldPart));
  PetscCall(PetscPartitionerDestroy(&gatherPart));
  PetscCall(PetscPartitionerDestroy(&oldPart));
  PetscFunctionReturn(0);
}
2266 
2267 /*@C
2268   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
2269 
2270   Collective on dm
2271 
2272   Input Parameter:
2273 . dm - the original DMPlex object
2274 
2275   Output Parameters:
2276 + sf - the PetscSF used for point distribution (optional)
2277 - redundantMesh - the redundant DM object, or NULL
2278 
2279   Level: intermediate
2280 
2281 .seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2282 @*/
2283 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
2284 {
2285   MPI_Comm     comm;
2286   PetscMPIInt  size, rank;
2287   PetscInt     pStart, pEnd, p;
2288   PetscInt     numPoints = -1;
2289   PetscSF      migrationSF, sfPoint, gatherSF;
2290   DM           gatherDM, dmCoord;
2291   PetscSFNode *points;
2292 
2293   PetscFunctionBegin;
2294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2295   PetscValidPointer(redundantMesh, 3);
2296   *redundantMesh = NULL;
2297   comm           = PetscObjectComm((PetscObject)dm);
2298   PetscCallMPI(MPI_Comm_size(comm, &size));
2299   if (size == 1) {
2300     PetscCall(PetscObjectReference((PetscObject)dm));
2301     *redundantMesh = dm;
2302     if (sf) *sf = NULL;
2303     PetscFunctionReturn(0);
2304   }
2305   PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2306   if (!gatherDM) PetscFunctionReturn(0);
2307   PetscCallMPI(MPI_Comm_rank(comm, &rank));
2308   PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2309   numPoints = pEnd - pStart;
2310   PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2311   PetscCall(PetscMalloc1(numPoints, &points));
2312   PetscCall(PetscSFCreate(comm, &migrationSF));
2313   for (p = 0; p < numPoints; p++) {
2314     points[p].index = p;
2315     points[p].rank  = 0;
2316   }
2317   PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2318   PetscCall(DMPlexCreate(comm, redundantMesh));
2319   PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2320   PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2321   /* This is to express that all point are in overlap */
2322   PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
2323   PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2324   PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2325   PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2326   if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2327   PetscCall(PetscSFDestroy(&sfPoint));
2328   if (sf) {
2329     PetscSF tsf;
2330 
2331     PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2332     PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2333     PetscCall(PetscSFDestroy(&tsf));
2334   }
2335   PetscCall(PetscSFDestroy(&migrationSF));
2336   PetscCall(PetscSFDestroy(&gatherSF));
2337   PetscCall(DMDestroy(&gatherDM));
2338   PetscCall(DMCopyDisc(dm, *redundantMesh));
2339   PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2340   PetscFunctionReturn(0);
2341 }
2342 
2343 /*@
2344   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2345 
2346   Collective
2347 
2348   Input Parameter:
2349 . dm      - The DM object
2350 
2351   Output Parameter:
2352 . distributed - Flag whether the DM is distributed
2353 
2354   Level: intermediate
2355 
2356   Notes:
2357   This currently finds out whether at least two ranks have any DAG points.
2358   This involves MPI_Allreduce() with one integer.
2359   The result is currently not stashed so every call to this routine involves this global communication.
2360 
2361 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2362 @*/
2363 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2364 {
2365   PetscInt    pStart, pEnd, count;
2366   MPI_Comm    comm;
2367   PetscMPIInt size;
2368 
2369   PetscFunctionBegin;
2370   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2371   PetscValidBoolPointer(distributed, 2);
2372   PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2373   PetscCallMPI(MPI_Comm_size(comm, &size));
2374   if (size == 1) {
2375     *distributed = PETSC_FALSE;
2376     PetscFunctionReturn(0);
2377   }
2378   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2379   count = (pEnd - pStart) > 0 ? 1 : 0;
2380   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2381   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2382   PetscFunctionReturn(0);
2383 }
2384 
2385 /*@C
2386   DMPlexDistributionSetName - Set the name of the specific parallel distribution
2387 
2388   Input Parameters:
2389 + dm   - The DM
2390 - name - The name of the specific parallel distribution
2391 
2392   Note:
2393   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2394   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2395   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2396   loads the parallel distribution stored in file under this name.
2397 
2398   Level: developer
2399 
2400 .seealso: `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2401 @*/
2402 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2403 {
2404   DM_Plex *mesh = (DM_Plex *)dm->data;
2405 
2406   PetscFunctionBegin;
2407   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2408   if (name) PetscValidCharPointer(name, 2);
2409   PetscCall(PetscFree(mesh->distributionName));
2410   PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2411   PetscFunctionReturn(0);
2412 }
2413 
2414 /*@C
2415   DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2416 
2417   Input Parameter:
2418 . dm - The DM
2419 
2420   Output Parameter:
2421 . name - The name of the specific parallel distribution
2422 
2423   Note:
2424   If distribution name is set when saving, DMPlexTopologyView() saves the plex's
2425   parallel distribution (i.e., partition, ownership, and local ordering of points) under
2426   this name. Conversely, if distribution name is set when loading, DMPlexTopologyLoad()
2427   loads the parallel distribution stored in file under this name.
2428 
2429   Level: developer
2430 
2431 .seealso: `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2432 @*/
2433 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2434 {
2435   DM_Plex *mesh = (DM_Plex *)dm->data;
2436 
2437   PetscFunctionBegin;
2438   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2439   PetscValidPointer(name, 2);
2440   *name = mesh->distributionName;
2441   PetscFunctionReturn(0);
2442 }
2443