xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 11486bccf1aeea1ca5536228f99d437b39bdaca6)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
/* Cone-based adjacency: p is adjacent to every point in the support of p itself
   and of each point in cone(p).  On entry *adjSize is the capacity of adj[];
   on exit it is the number of (unique) adjacent points written. */
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
  PetscCall(DMPlexGetCone(dm, p, &cone));
  /* c == 0 visits p itself; c > 0 visits cone point c-1 */
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt  point   = !c ? p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
    PetscCall(DMPlexGetSupport(dm, point, &support));
    for (s = 0; s < supportSize; ++s) {
      /* Linear duplicate scan: the comma expression in the loop condition appends
         support[s] (and terminates the scan) only when it was not found in adj[0..numAdj) */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
        if (support[s] == adj[q]) break;
      }
      /* NOTE(review): the append above happens before this bound check, so when adj is
         exactly full the write lands one slot past the allocation before we error out;
         callers appear to overallocate (see DMPlexGetAdjacency_Internal) -- confirm */
      PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}
127 
/* Support-based adjacency (dual of the cone variant): p is adjacent to every point
   in the cone of p itself and of each point in supp(p).  On entry *adjSize is the
   capacity of adj[]; on exit it is the number of (unique) adjacent points. */
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
  PetscCall(DMPlexGetSupport(dm, p, &support));
  /* s == 0 visits p itself; s > 0 visits support point s-1 */
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt  point = !s ? p : support[s-1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
    PetscCall(DMPlexGetCone(dm, point, &cone));
    for (c = 0; c < coneSize; ++c) {
      /* Linear duplicate scan: the comma expression appends cone[c] (and terminates
         the scan) only when it was not already present in adj[0..numAdj) */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      /* NOTE(review): append happens before this bound check -- same one-past-capacity
         hazard as the cone variant; callers appear to overallocate -- confirm */
      PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}
153 
/* Transitive-closure adjacency: take the transitive closure of p in one direction
   (star if useClosure is PETSC_FALSE, closure if PETSC_TRUE), then for each point in
   it take the closure in the opposite direction, collecting unique points.
   On entry *adjSize is the capacity of adj[]; on exit, the number of points found. */
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;

  PetscFunctionBeginHot;
  PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
  /* Closure arrays store (point, orientation) pairs, hence the stride of 2 */
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
    for (c = 0; c < closureSize*2; c += 2) {
      /* Linear duplicate scan: the comma expression appends closure[c] only when
         it was not already present in adj[0..numAdj) */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      /* NOTE(review): append happens before this bound check -- same one-past-capacity
         hazard as the cone/support variants; callers appear to overallocate -- confirm */
      PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
    }
    PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure));
  }
  PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}
178 
/* Compute the adjacency of point p, dispatching to the user callback (if set) or to
   one of the cone/support/transitive strategies, then optionally replace constrained
   (anchored) points by their anchor points.
   If *adj is NULL, an array of worst-case capacity is allocated (caller frees it);
   if *adjSize is negative, it is set to that capacity before the computation. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static -- remembers the capacity computed by the most recent
     allocating call; a caller that passes its own *adj with *adjSize < 0 receives
     this possibly-stale value, and the function is not thread-safe -- confirm intent */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBeginHot;
  if (useAnchors) {
    PetscCall(DMPlexGetAnchors(dm,&aSec,&aIS));
    if (aSec) {
      PetscCall(PetscSectionGetMaxDof(aSec,&maxAnchors));
      maxAnchors = PetscMax(1,maxAnchors);
      PetscCall(PetscSectionGetChart(aSec,&aStart,&aEnd));
      PetscCall(ISGetIndices(aIS,&anchors));
    }
  }
  if (!*adj) {
    /* Worst-case adjacency count: geometric series over the maximum cone/support
       sizes through the full mesh depth, scaled by the maximum anchors per point
       and capped by the chart size */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    PetscCall(DMPlexGetChart(dm, &pStart,&pEnd));
    PetscCall(DMPlexGetDepth(dm, &depth));
    depth = PetscMax(depth, -depth);
    PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    PetscCall(PetscMalloc1(asiz,adj));
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    /* A user-defined adjacency callback overrides every built-in strategy */
    PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
  } else if (useTransitiveClosure) {
    PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
  } else if (useCone) {
    PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
  } else {
    PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points:
       remove the point (compacting the array) and append its anchors, deduplicated.
       origSize tracks the yet-unprocessed prefix; numAdj the total count. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      /* NOTE: this p intentionally shadows the function parameter p */
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        PetscCall(PetscSectionGetDof(aSec,p,&aDof));
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Shift the remaining entries left to delete the constrained point */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        PetscCall(PetscSectionGetOffset(aSec,p,&aOff));
        for (s = 0; s < aDof; ++s) {
          /* Append each anchor point unless it is already present */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          PetscCheck(numAdj <= maxAdjSize,PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    PetscCall(ISRestoreIndices(aIS,&anchors));
  }
  PetscFunctionReturn(0);
}
264 
265 /*@
266   DMPlexGetAdjacency - Return all points adjacent to the given point
267 
268   Input Parameters:
269 + dm - The DM object
270 - p  - The point
271 
272   Input/Output Parameters:
273 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
274             on output the number of adjacent points
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
276         on output contains the adjacent points
277 
278   Level: advanced
279 
280   Notes:
281     The user must PetscFree the adj array if it was not passed in.
282 
283 .seealso: `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
284 @*/
285 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
286 {
287   PetscBool      useCone, useClosure, useAnchors;
288 
289   PetscFunctionBeginHot;
290   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
291   PetscValidIntPointer(adjSize,3);
292   PetscValidPointer(adj,4);
293   PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
294   PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
295   PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
296   PetscFunctionReturn(0);
297 }
298 
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on dm

  Input Parameters:
+ dm      - The DM
. sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: `PetscSFCreate()`, `DMPlexCreateProcessSF()`
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;  /* one bit per rank: is that rank a neighbor? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 7);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
  PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
  PetscCall(PetscBTCreate(size, &neighbors));
  PetscCall(PetscBTMemzero(size, neighbors));
  /* Compute root-to-leaf process connectivity */
  PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(rootRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
  }
  PetscCall(ISRestoreIndices(rootRanks, &nranks));
  /* Compute leaf-to-neighbor process connectivity */
  PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
  PetscCall(ISGetIndices(leafRanks, &nranks));
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
    PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
    for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff+n]));
  }
  PetscCall(ISRestoreIndices(leafRanks, &nranks));
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: a process is never its own neighbor */
  PetscBTClear(neighbors, rank);
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
  PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
  PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
  /* Each neighbor becomes one SF edge: local leaf `proc` -> (rank, proc) on the remote side */
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  PetscCall(PetscBTDestroy(&neighbors));
  /* ranksNew ownership transfers to the IS if requested, otherwise free it here */
  if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
  else              PetscCall(PetscFree(ranksNew));
  if (sfProcess) {
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
    PetscCall(PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF"));
    PetscCall(PetscSFSetFromOptions(*sfProcess));
    PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
  }
  PetscFunctionReturn(0);
}
391 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  PetscCall(DMGetPointSF(dm, &sfPoint));
  /* Compute number of leaves for each root: the SF root degree becomes the section dof */
  PetscCall(PetscObjectSetName((PetscObject) rootSection, "Root Section"));
  PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
  PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
  PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
  for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]));
  PetscCall(PetscSectionSetUp(rootSection));
  /* Gather rank of each leaf to root: every process sends its own rank for each leaf */
  PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
  PetscCall(PetscMalloc1(pEnd-pStart, &myrank));
  PetscCall(PetscMalloc1(nedges,  &remoterank));
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
  PetscCall(PetscFree(myrank));
  /* remoterank ownership transfers to the IS */
  PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
  /* Distribute remote ranks to leaves, producing the leaf-side section and rank IS */
  PetscCall(PetscObjectSetName((PetscObject) leafSection, "Leaf Section"));
  PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
  PetscFunctionReturn(0);
}
445 
#if 0
/* Currently disabled: copy the overlap-label configuration (candidate labels and
   values, plus the exclusion label and value) from dm to ndm, looking each label
   up by name on the new DM.  NOTE(review): assumes nmesh->ovLabels/ovValues are
   already sized to hold numOvLabels entries -- confirm before re-enabling. */
static PetscErrorCode DMPlexCopyOverlapLabels(DM dm, DM ndm)
{
  DM_Plex *mesh  = (DM_Plex *) dm->data;
  DM_Plex *nmesh = (DM_Plex *) ndm->data;

  PetscFunctionBegin;
  if (mesh->numOvLabels) {
    const char *name;
    PetscInt    l;

    nmesh->numOvLabels = mesh->numOvLabels;
    for (l = 0; l < mesh->numOvLabels; ++l) {
      PetscCall(PetscObjectGetName((PetscObject) mesh->ovLabels[l], &name));
      PetscCall(DMGetLabel(ndm, name, &nmesh->ovLabels[l]));
      nmesh->ovValues[l] = mesh->ovValues[l];
    }
    PetscCall(PetscObjectGetName((PetscObject) mesh->ovExLabel, &name));
    PetscCall(DMGetLabel(ndm, name, &nmesh->ovExLabel));
    nmesh->ovExValue = mesh->ovExValue;
  }
  PetscFunctionReturn(0);
}
#endif
470 
471 /*@C
472   DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
473 
474   Collective on dm
475 
476   Input Parameters:
477 + dm          - The DM
478 . levels      - Number of overlap levels
479 . rootSection - The number of leaves for a given root point
480 . rootrank    - The rank of each edge into the root point
481 . leafSection - The number of processes sharing a given leaf point
482 - leafrank    - The rank of each process sharing a leaf point
483 
484   Output Parameter:
485 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
486 
487   Level: developer
488 
489 .seealso: `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
490 @*/
491 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
492 {
493   MPI_Comm           comm;
494   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
495   PetscSF            sfPoint;
496   const PetscSFNode *remote;
497   const PetscInt    *local;
498   const PetscInt    *nrank, *rrank;
499   PetscInt          *adj = NULL;
500   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
501   PetscMPIInt        rank, size;
502   PetscBool          flg;
503 
504   PetscFunctionBegin;
505   *ovLabel = NULL;
506   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
507   PetscCallMPI(MPI_Comm_size(comm, &size));
508   PetscCallMPI(MPI_Comm_rank(comm, &rank));
509   if (size == 1) PetscFunctionReturn(0);
510   PetscCall(DMGetPointSF(dm, &sfPoint));
511   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
512   if (!levels) {
513     /* Add owned points */
514     PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
515     for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
516     PetscFunctionReturn(0);
517   }
518   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
519   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
520   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
521   /* Handle leaves: shared with the root point */
522   for (l = 0; l < nleaves; ++l) {
523     PetscInt adjSize = PETSC_DETERMINE, a;
524 
525     PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
526     for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
527   }
528   PetscCall(ISGetIndices(rootrank, &rrank));
529   PetscCall(ISGetIndices(leafrank, &nrank));
530   /* Handle roots */
531   for (p = pStart; p < pEnd; ++p) {
532     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
533 
534     if ((p >= sStart) && (p < sEnd)) {
535       /* Some leaves share a root with other leaves on different processes */
536       PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
537       if (neighbors) {
538         PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
539         PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
540         for (n = 0; n < neighbors; ++n) {
541           const PetscInt remoteRank = nrank[noff+n];
542 
543           if (remoteRank == rank) continue;
544           for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
545         }
546       }
547     }
548     /* Roots are shared with leaves */
549     PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
550     if (!neighbors) continue;
551     PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
552     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
553     for (n = 0; n < neighbors; ++n) {
554       const PetscInt remoteRank = rrank[noff+n];
555 
556       if (remoteRank == rank) continue;
557       for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
558     }
559   }
560   PetscCall(PetscFree(adj));
561   PetscCall(ISRestoreIndices(rootrank, &rrank));
562   PetscCall(ISRestoreIndices(leafrank, &nrank));
563   /* Add additional overlap levels */
564   for (l = 1; l < levels; l++) {
565     /* Propagate point donations over SF to capture remote connections */
566     PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
567     /* Add next level of point donations to the label */
568     PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
569   }
570   /* We require the closure in the overlap */
571   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
572   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
573   if (flg) {
574     PetscViewer viewer;
575     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
576     PetscCall(DMLabelView(ovAdjByRank, viewer));
577   }
578   /* Invert sender to receiver label */
579   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
580   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
581   /* Add owned points, except for shared local points */
582   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
583   for (l = 0; l < nleaves; ++l) {
584     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
585     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
586   }
587   /* Clean up */
588   PetscCall(DMLabelDestroy(&ovAdjByRank));
589   PetscFunctionReturn(0);
590 }
591 
592 static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
593 {
594   PetscInt neighbors, el;
595 
596   PetscFunctionBegin;
597   PetscCall(PetscSectionGetDof(section, p, &neighbors));
598   if (neighbors) {
599     PetscInt   *adj = NULL;
600     PetscInt    adjSize = PETSC_DETERMINE, noff, n, a;
601     PetscMPIInt rank;
602 
603     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
604     PetscCall(PetscSectionGetOffset(section, p, &noff));
605     PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
606     for (n = 0; n < neighbors; ++n) {
607       const PetscInt remoteRank = ranks[noff+n];
608 
609       if (remoteRank == rank) continue;
610       for (a = 0; a < adjSize; ++a) {
611         PetscBool insert = PETSC_TRUE;
612 
613         for (el = 0; el < numExLabels; ++el) {
614           PetscInt exVal;
615           PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
616           if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
617         }
618         if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
619       }
620     }
621   }
622   PetscFunctionReturn(0);
623 }
624 
625 /*@C
626   DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
627 
628   Collective on dm
629 
630   Input Parameters:
631 + dm          - The DM
632 . numLabels   - The number of labels to draw candidate points from
633 . label       - An array of labels containing candidate points
634 . value       - An array of label values marking the candidate points
635 . numExLabels - The number of labels to use for exclusion
636 . exLabel     - An array of labels indicating points to be excluded, or NULL
637 . exValue     - An array of label values to be excluded, or NULL
638 . rootSection - The number of leaves for a given root point
639 . rootrank    - The rank of each edge into the root point
640 . leafSection - The number of processes sharing a given leaf point
641 - leafrank    - The rank of each process sharing a leaf point
642 
643   Output Parameter:
644 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
645 
646   Note:
647   The candidate points are only added to the overlap if they are adjacent to a shared point
648 
649   Level: developer
650 
651 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
652 @*/
653 PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[],
654   PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
655 {
656   MPI_Comm           comm;
657   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
658   PetscSF            sfPoint;
659   const PetscSFNode *remote;
660   const PetscInt    *local;
661   const PetscInt    *nrank, *rrank;
662   PetscInt          *adj = NULL;
663   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
664   PetscMPIInt        rank, size;
665   PetscBool          flg;
666 
667   PetscFunctionBegin;
668   *ovLabel = NULL;
669   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
670   PetscCallMPI(MPI_Comm_size(comm, &size));
671   PetscCallMPI(MPI_Comm_rank(comm, &rank));
672   if (size == 1) PetscFunctionReturn(0);
673   PetscCall(DMGetPointSF(dm, &sfPoint));
674   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
675   PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
676   PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
677   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
678   PetscCall(ISGetIndices(rootrank, &rrank));
679   PetscCall(ISGetIndices(leafrank, &nrank));
680   for (l = 0; l < numLabels; ++l) {
681     IS              valIS;
682     const PetscInt *points;
683     PetscInt        n;
684 
685     PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
686     if (!valIS) continue;
687     PetscCall(ISGetIndices(valIS, &points));
688     PetscCall(ISGetLocalSize(valIS, &n));
689     for (PetscInt i = 0; i < n; ++i) {
690       const PetscInt p = points[i];
691 
692       if ((p >= sStart) && (p < sEnd)) {
693         PetscInt loc, adjSize = PETSC_DETERMINE;
694 
695         /* Handle leaves: shared with the root point */
696         if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
697         else       loc = (p >= 0 && p < nleaves) ? p : -1;
698         if (loc >= 0) {
699           const PetscInt remoteRank = remote[loc].rank;
700 
701           PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
702           for (PetscInt a = 0; a < adjSize; ++a) {
703             PetscBool insert = PETSC_TRUE;
704 
705             for (el = 0; el < numExLabels; ++el) {
706               PetscInt exVal;
707               PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
708               if (exVal == exValue[el]) {insert = PETSC_FALSE; break;}
709             }
710             if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
711           }
712         }
713         /* Some leaves share a root with other leaves on different processes */
714         HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank);
715       }
716       /* Roots are shared with leaves */
717       HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank);
718     }
719     PetscCall(ISRestoreIndices(valIS, &points));
720     PetscCall(ISDestroy(&valIS));
721   }
722   PetscCall(PetscFree(adj));
723   PetscCall(ISRestoreIndices(rootrank, &rrank));
724   PetscCall(ISRestoreIndices(leafrank, &nrank));
725   /* We require the closure in the overlap */
726   PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
727   PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg));
728   if (flg) {
729     PetscViewer viewer;
730     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
731     PetscCall(DMLabelView(ovAdjByRank, viewer));
732   }
733   /* Invert sender to receiver label */
734   PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
735   PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
736   /* Add owned points, except for shared local points */
737   for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
738   for (l = 0; l < nleaves; ++l) {
739     PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
740     PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
741   }
742   /* Clean up */
743   PetscCall(DMLabelDestroy(&ovAdjByRank));
744   PetscFunctionReturn(0);
745 }
746 
747 /*@C
748   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
749 
750   Collective on dm
751 
752   Input Parameters:
753 + dm          - The DM
754 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
755 
756   Output Parameter:
757 . migrationSF - An SF that maps original points in old locations to points in new locations
758 
759   Level: developer
760 
761 .seealso: `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
762 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetDimension(dm, &dim));

  /* Before building the migration SF we need to know the new stratum offsets:
     tag every local point with its depth and broadcast over the overlap SF so
     each leaf learns which stratum its incoming point belongs to */
  PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* -1 marks a leaf whose depth has not yet been received */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE));

  /* Count received points in each stratum and compute the internal strata shift.
     The shifts append incoming points to each local stratum in the DMPlex numbering
     order cells (depth dim), vertices (0), faces (dim-1), edges (1), ...
     NOTE(review): this index arithmetic assumes depth == dim (interpolated mesh);
     confirm behavior for partially interpolated meshes */
  PetscCall(PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx));
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF describing the full overlap migration: all current local points
     plus every leaf received through the overlap SF */
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  newLeaves = pEnd - pStart + nleaves;
  PetscCall(PetscMalloc1(newLeaves, &ilocal));
  PetscCall(PetscMalloc1(newLeaves, &iremote));
  /* First map local points to themselves, shifted to leave room for incoming strata */
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points: leaves of the existing
     point SF keep pointing at their current owners rather than at themselves */
  PetscCall(DMGetPointSF(dm, &pointSF));
  PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
  for (d=0; d<dim+1; d++) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points; depthIdx[] was advanced past all the
     local points of each stratum in the loop above */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  PetscCall(PetscFree2(pointDepths,remoteDepths));

  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF"));
  PetscCall(PetscSFSetFromOptions(*migrationSF));
  PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
  /* The SF takes ownership of ilocal and iremote */
  PetscCall(PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));

  PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
  PetscFunctionReturn(0);
}
856 
857 /*@
858   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
859 
860   Input Parameters:
861 + dm          - The DM
862 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
863 
864   Output Parameter:
865 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
866 
867   Note:
868   This lexicographically sorts by (depth, cellType)
869 
870   Level: developer
871 
872 .seealso: `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
873 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths;
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv,    *ctShift,    *ctIdx;
  const PetscSFNode *iremote;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMPlexGetDepth(dm, &ldepth));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
  /* Any rank with points (ldepth >= 0) must agree with the global maximum depth */
  PetscCheck(!(ldepth >= 0) || !(depth != ldepth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
  PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0));

  /* Before building the migration SF we need to know the new stratum offsets:
     pack (depth, cellType) per point into an SFNode and broadcast over sf */
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
  PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
  for (d = 0; d < depth+1; ++d) {
    PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      /* Reuse the SFNode pair: .index carries the depth, .rank carries the cell type */
      PetscCall(DMPlexGetCellType(dm, p, &ct));
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  /* (-1,-1) marks uninitialized leaves; a negative .rank later selects depth-based placement */
  for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
  PetscCall(PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE));
  /* Count received points in each stratum and compute the internal strata shift */
  PetscCall(PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
         Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
     */
    depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
    dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i]; /* intentionally shadows the outer dim for this stratum */

      /* Points with a known cell type of this dimension come first, then the
         depth-only points of the same stratum */
      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
        ctShift[c] = shift;
        shift     += ctRecv[c];
      }
      depthShift[dep] = shift;
      shift          += depthRecv[dep];
    }
    /* Park cell types whose dimension falls outside [0, dim] after everything else */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);

      if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
        ctShift[c] = shift;
        shift     += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  PetscCall(PetscMalloc1(nleaves, &ilocal));
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;

    if ((PetscInt) ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  PetscCall(PetscSFCreate(comm, migrationSF));
  PetscCall(PetscObjectSetName((PetscObject) *migrationSF, "Migration SF"));
  /* ilocal is owned by the SF; iremote belongs to sf, so it must be copied */
  PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode*)iremote, PETSC_COPY_VALUES));
  PetscCall(PetscFree2(pointDepths,remoteDepths));
  PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
  PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0));
  PetscFunctionReturn(0);
}
972 
973 /*@
974   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
975 
976   Collective on dm
977 
978   Input Parameters:
979 + dm - The DMPlex object
980 . pointSF - The PetscSF describing the communication pattern
981 . originalSection - The PetscSection for existing data layout
982 - originalVec - The existing data in a local vector
983 
984   Output Parameters:
+ newSection - The PetscSection describing the new data layout
986 - newVec - The new data in a local vector
987 
988   Level: developer
989 
990 .seealso: `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`
991 @*/
992 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
993 {
994   PetscSF        fieldSF;
995   PetscInt      *remoteOffsets, fieldSize;
996   PetscScalar   *originalValues, *newValues;
997 
998   PetscFunctionBegin;
999   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1000   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1001 
1002   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1003   PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1004   PetscCall(VecSetType(newVec,dm->vectype));
1005 
1006   PetscCall(VecGetArray(originalVec, &originalValues));
1007   PetscCall(VecGetArray(newVec, &newValues));
1008   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1009   PetscCall(PetscFree(remoteOffsets));
1010   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1011   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE));
1012   PetscCall(PetscSFDestroy(&fieldSF));
1013   PetscCall(VecRestoreArray(newVec, &newValues));
1014   PetscCall(VecRestoreArray(originalVec, &originalValues));
1015   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 /*@
1020   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1021 
1022   Collective on dm
1023 
1024   Input Parameters:
1025 + dm - The DMPlex object
1026 . pointSF - The PetscSF describing the communication pattern
1027 . originalSection - The PetscSection for existing data layout
1028 - originalIS - The existing data
1029 
1030   Output Parameters:
+ newSection - The PetscSection describing the new data layout
1032 - newIS - The new data
1033 
1034   Level: developer
1035 
1036 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`
1037 @*/
1038 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1039 {
1040   PetscSF         fieldSF;
1041   PetscInt       *newValues, *remoteOffsets, fieldSize;
1042   const PetscInt *originalValues;
1043 
1044   PetscFunctionBegin;
1045   PetscCall(PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0));
1046   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1047 
1048   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1049   PetscCall(PetscMalloc1(fieldSize, &newValues));
1050 
1051   PetscCall(ISGetIndices(originalIS, &originalValues));
1052   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1053   PetscCall(PetscFree(remoteOffsets));
1054   PetscCall(PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1055   PetscCall(PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE));
1056   PetscCall(PetscSFDestroy(&fieldSF));
1057   PetscCall(ISRestoreIndices(originalIS, &originalValues));
1058   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1059   PetscCall(PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0));
1060   PetscFunctionReturn(0);
1061 }
1062 
1063 /*@
1064   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1065 
1066   Collective on dm
1067 
1068   Input Parameters:
1069 + dm - The DMPlex object
1070 . pointSF - The PetscSF describing the communication pattern
1071 . originalSection - The PetscSection for existing data layout
1072 . datatype - The type of data
1073 - originalData - The existing data
1074 
1075   Output Parameters:
1076 + newSection - The PetscSection describing the new data layout
1077 - newData - The new data
1078 
1079   Level: developer
1080 
1081 .seealso: `DMPlexDistribute()`, `DMPlexDistributeField()`
1082 @*/
1083 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1084 {
1085   PetscSF        fieldSF;
1086   PetscInt      *remoteOffsets, fieldSize;
1087   PetscMPIInt    dataSize;
1088 
1089   PetscFunctionBegin;
1090   PetscCall(PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0));
1091   PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1092 
1093   PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1094   PetscCallMPI(MPI_Type_size(datatype, &dataSize));
1095   PetscCall(PetscMalloc(fieldSize * dataSize, newData));
1096 
1097   PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1098   PetscCall(PetscFree(remoteOffsets));
1099   PetscCall(PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1100   PetscCall(PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE));
1101   PetscCall(PetscSFDestroy(&fieldSF));
1102   PetscCall(PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0));
1103   PetscFunctionReturn(0);
1104 }
1105 
/* Distribute cones and cone orientations from dm to dmParallel over the migration SF,
   renumbering cone entries into dmParallel's local numbering, then symmetrize and
   stratify the parallel mesh and copy the adjacency settings */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0));
  /* Distribute cone section */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
  PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
  PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
  PetscCall(DMSetUp(dmParallel));
  /* Communicate and renumber cones */
  PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
  PetscCall(PetscFree(remoteOffsets));
  PetscCall(DMPlexGetCones(dm, &cones));
  if (original) {
    PetscInt numCones;

    /* Map sender cone points to the global numbering before shipping them */
    PetscCall(PetscSectionGetStorageSize(originalConeSection,&numCones));
    PetscCall(PetscMalloc1(numCones,&globCones));
    PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
  } else {
    /* Cones are already in the global numbering */
    globCones = cones;
  }
  PetscCall(DMPlexGetCones(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE));
  if (original) PetscCall(PetscFree(globCones));
  /* Translate received global cone points into the new local numbering; entries
     not found are masked with a negative value */
  PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
  PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
  if (PetscDefined(USE_DEBUG)) {
    /* In debug mode, verify that every cone point was resolved locally */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank,p));}
    }
    PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg));
  if (flg) {
    PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
    PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
    PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(coneSF, NULL));
  }
  /* Orientations need no renumbering, so reuse the same section SF */
  PetscCall(DMPlexGetConeOrientations(dm, &cones));
  PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
  PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&coneSF));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0));
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
  }
  PetscCall(DMPlexSymmetrize(dmParallel));
  PetscCall(DMPlexStratify(dmParallel));
  /* Propagate the adjacency configuration to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscFunctionReturn(0);
}
1185 
/* Distribute vertex coordinates (and, if present, cell coordinates and periodicity
   information) from dm to dmParallel over the migration SF */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *Lstart, *L;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  /* Copy the coordinate discretization so both DMs lay out dofs the same way */
  PetscCall(DMGetCoordinateDM(dm, &cdm));
  PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
  PetscCall(DMCopyDisc(cdm, cdmParallel));
  PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
  PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
  PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
  if (originalCoordinates) {
    /* Coordinates live in a local (sequential) vector on each rank */
    PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
    PetscCall(PetscObjectGetName((PetscObject) originalCoordinates, &name));
    PetscCall(PetscObjectSetName((PetscObject) newCoordinates, name));

    PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
    PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
    PetscCall(VecGetBlockSize(originalCoordinates, &bs));
    PetscCall(VecSetBlockSize(newCoordinates, bs));
    PetscCall(VecDestroy(&newCoordinates));
  }

  /* Periodicity metadata is replicated rather than communicated */
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
  /* Cell coordinates (e.g. for periodic meshes) get the same treatment if present */
  PetscCall(DMGetCellCoordinateDM(dm, &cdm));
  if (cdm) {
    PetscCall(DMClone(dmParallel, &cdmParallel));
    PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
    PetscCall(DMCopyDisc(cdm, cdmParallel));
    PetscCall(DMDestroy(&cdmParallel));
    PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
    PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
    PetscCall(PetscSectionCreate(comm, &newCoordSection));
    if (originalCoordinates) {
      PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
      PetscCall(PetscObjectGetName((PetscObject) originalCoordinates, &name));
      PetscCall(PetscObjectSetName((PetscObject) newCoordinates, name));

      PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
      PetscCall(VecGetBlockSize(originalCoordinates, &bs));
      PetscCall(VecSetBlockSize(newCoordinates, bs));
      PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
      PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
      PetscCall(VecDestroy(&newCoordinates));
    }
    PetscCall(PetscSectionDestroy(&newCoordSection));
  }
  PetscFunctionReturn(0);
}
1247 
/* Distribute all labels of dm onto dmParallel over the migration SF.
   Collective: every rank participates in the Bcast/Allreduce calls below, even
   ranks that hold no labels themselves */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* If the user has changed the depth label, communicate it instead */
  PetscCall(DMPlexGetDepth(dm, &depth));
  PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
  if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject) depthLabel, &depthState));
  /* The saved state in mesh->depthState differs from the label's current state
     exactly when someone modified the depth label after stratification */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm));
  if (sendDepth) {
    /* Drop the automatically generated depth label so the distributed one replaces it */
    PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
    PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
  }
  /* Everyone must have either the same number of labels, or none */
  PetscCall(DMGetNumLabels(dm, &numLocalLabels));
  numLabels = numLocalLabels;
  PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      PetscCall(DMGetLabelByNum(dm, l, &label));
      /* Skip "depth" because it is recreated */
      PetscCall(PetscObjectGetName((PetscObject) label, &name));
      PetscCall(PetscStrcmp(name, "depth", &isDepth));
    }
    /* Rank 0 decides; ranks without labels receive isDepth via the Bcast */
    PetscCallMPI(MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm));
    if (isDepth && !sendDepth) continue;
    PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      PetscCall(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
      PetscCheck(!(depth >= 0) || !(gdepth != depth),PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        PetscCall(DMLabelHasStratum(labelNew, d, &has));
        if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
      }
    }
    PetscCall(DMAddLabel(dmParallel, labelNew));
    /* Put the output flag in the new label */
    if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
    PetscCall(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm));
    PetscCall(PetscObjectGetName((PetscObject) labelNew, &name));
    PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
    PetscCall(DMLabelDestroy(&labelNew));
  }
  {
    DMLabel ctLabel;

    // Reset label for fast lookup
    PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
    PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0));
  PetscFunctionReturn(0);
}
1326 
/* Distribute the reference tree and parent/childID relations from dm to dmParallel,
   renumbering parents from the original (global) numbering into the new local one */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));

  /* Set up tree */
  PetscCall(DMPlexGetReferenceTree(dm,&refTree));
  PetscCall(DMPlexSetReferenceTree(dmParallel,refTree));
  PetscCall(DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL));
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    /* Distribute the parent section layout, then build a dof-level SF for the data */
    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection));
    PetscCall(PetscSectionSetChart(newParentSection,pStart,pEnd));
    PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
    PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
    PetscCall(PetscFree(remoteOffsetsParents));
    PetscCall(PetscSectionGetStorageSize(newParentSection,&newParentSize));
    PetscCall(PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs));
    if (original) {
      PetscInt numParents;

      /* Map sender parent points to the global numbering before shipping them */
      PetscCall(PetscSectionGetStorageSize(origParentSection,&numParents));
      PetscCall(PetscMalloc1(numParents,&globParents));
      PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
    }
    else {
      /* Parents are already in the global numbering */
      globParents = origParents;
    }
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE));
    if (original) PetscCall(PetscFree(globParents));
    PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE));
    /* Translate received global parents into the new local numbering; misses are masked negative */
    PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
    if (PetscDefined(USE_DEBUG)) {
      /* In debug mode, verify that every parent was resolved locally */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));}
      }
      PetscCheck(valid,PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg));
    if (flg) {
      PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
      PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
      PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
      PetscCall(PetscSFView(parentSF, NULL));
    }
    PetscCall(DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs));
    PetscCall(PetscSectionDestroy(&newParentSection));
    PetscCall(PetscFree2(newParents,newChildIDs));
    PetscCall(PetscSFDestroy(&parentSF));
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1400 
/* NOTE(review): marked PETSC_UNUSED -- apparently superseded by DMPlexCreatePointSF(); kept for reference.
   Builds the point SF of dmParallel from the migration SF: ownership of shared points is
   decided by an MPI_MAXLOC vote (highest bidding rank wins), and every non-owning process
   records a leaf pointing at the owner's copy. Also copies adjacency settings to dmParallel. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
    PetscCall(PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL));
    PetscCall(PetscMalloc2(numRoots,&rowners,numLeaves,&lowners));
    /* Mark every root "unclaimed" and broadcast that state to the leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Vote: MAXLOC picks the highest bidding rank for each root, then the winner is broadcast back */
    PetscCall(PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC));
    PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE));
    /* Every leaf must now know a valid owner; leaves owned elsewhere become ghost points */
    for (p = 0; p < numLeaves; ++p) {
      PetscCheck(lowners[p].rank >= 0 && lowners[p].index >= 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    PetscCall(PetscMalloc1(numGhostPoints, &ghostPoints));
    PetscCall(PetscMalloc1(numGhostPoints, &remotePoints));
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    PetscCall(PetscFree2(rowners,lowners));
    /* ghostPoints/remotePoints ownership transfers to the SF (PETSC_OWN_POINTER) */
    PetscCall(PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER));
    PetscCall(PetscSFSetFromOptions((dmParallel)->sf));
  }
  {
    PetscBool useCone, useClosure, useAnchors;

    /* Propagate adjacency configuration from the serial DM to the parallel DM */
    PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
    PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
    PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
    PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0));
  PetscFunctionReturn(0);
}
1476 
1477 /*@
1478   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1479 
1480   Input Parameters:
1481 + dm - The DMPlex object
1482 - flg - Balance the partition?
1483 
1484   Level: intermediate
1485 
1486 .seealso: `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1487 @*/
1488 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1489 {
1490   DM_Plex *mesh = (DM_Plex *)dm->data;
1491 
1492   PetscFunctionBegin;
1493   mesh->partitionBalance = flg;
1494   PetscFunctionReturn(0);
1495 }
1496 
1497 /*@
1498   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1499 
1500   Input Parameter:
1501 . dm - The DMPlex object
1502 
1503   Output Parameter:
1504 . flg - Balance the partition?
1505 
1506   Level: intermediate
1507 
1508 .seealso: `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1509 @*/
1510 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1511 {
1512   DM_Plex *mesh = (DM_Plex *)dm->data;
1513 
1514   PetscFunctionBegin;
1515   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1516   PetscValidBoolPointer(flg, 2);
1517   *flg = mesh->partitionBalance;
1518   PetscFunctionReturn(0);
1519 }
1520 
/* Ballot used for the point-ownership vote in DMPlexCreatePointSF():
   'vote' is the (possibly randomly shifted) rank bid being maximized,
   while 'rank' and 'index' identify the bidding process and its local
   point number, carried along through the MAXLOC-style reduction. */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1524 
1525 /* MaxLoc, but carry a third piece of information around */
1526 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1527 {
1528   Petsc3Int *a = (Petsc3Int *)inout_;
1529   Petsc3Int *b = (Petsc3Int *)in_;
1530   PetscInt i, len = *len_;
1531   for (i = 0; i < len; i++) {
1532     if (a[i].vote < b[i].vote) {
1533       a[i].vote = b[i].vote;
1534       a[i].rank = b[i].rank;
1535       a[i].index = b[i].index;
1536     } else if (a[i].vote <= b[i].vote) {
1537       if (a[i].rank >= b[i].rank) {
1538         a[i].rank = b[i].rank;
1539         a[i].index = b[i].index;
1540       }
1541     }
1542   }
1543 }
1544 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Notes:
  Output pointSF is guaranteed to have the array of local indices (leaves) sorted.

  With ownership == PETSC_TRUE, each shared point is assigned an owner by a MAXLOC-style
  vote over the sharing ranks; when partition balancing is enabled the votes are randomly
  shifted so ownership spreads among the ranks. With ownership == PETSC_FALSE, each root
  is assumed already owned by its current rank and only the new local numbering is communicated.

  Level: developer

.seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts; /* only created on the balance path; guarded below by shift != NULL */
  const PetscInt     numShifts = 13759; /* size of the table of random rank shifts */
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size));
  PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0));

  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
  PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
      PetscCall(PetscRandomSetInterval(r, 0, 2467*size));
      PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
      PetscCall(VecSetSizes(shifts, numShifts, numShifts));
      PetscCall(VecSetType(shifts, VECSTANDARD));
      PetscCall(VecSetRandom(shifts, r));
      PetscCall(PetscRandomDestroy(&r));
      PetscCall(VecGetArrayRead(shifts, &shift));
    }

    PetscCall(PetscMalloc1(nroots, &rootVote));
    PetscCall(PetscMalloc1(nleaves, &leafVote));
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size));
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce the ballots to the roots with the vote-carrying MAXLOC op (MaxLocCarry) */
    PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
    PetscCallMPI(MPI_Type_commit(&datatype));
    PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
    PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
    PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
    PetscCallMPI(MPI_Op_free(&op));
    PetscCallMPI(MPI_Type_free(&datatype));
    /* Record the winning (rank, leaf index) pair at each root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    PetscCall(PetscFree(leafVote));
    PetscCall(PetscFree(rootVote));
  } else {
    /* No vote: the current rank keeps ownership of each of its roots */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Each leaf learns its owner's rank and the point's index in the owner's numbering */
  PetscCall(PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE));

  /* Leaves owned by another rank become the point-SF leaves */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
  PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    PetscCall(VecRestoreArrayRead(shifts, &shift));
    PetscCall(VecDestroy(&shifts));
  }
  if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT));
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF));
  PetscCall(PetscSFSetFromOptions(*pointSF));
  /* pointLocal/pointRemote ownership transfers to the SF (PETSC_OWN_POINTER) */
  PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
  PetscCall(PetscFree2(rootNodes, leafNodes));
  PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0));
  if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF));
  PetscFunctionReturn(0);
}
1676 
1677 /*@C
1678   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1679 
1680   Collective on dm
1681 
1682   Input Parameters:
1683 + dm       - The source DMPlex object
1684 - sf       - The star forest communication context describing the migration pattern
1685 
1686   Output Parameter:
1687 . targetDM - The target DMPlex object
1688 
1689   Level: intermediate
1690 
1691 .seealso: `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1692 @*/
1693 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1694 {
1695   MPI_Comm               comm;
1696   PetscInt               dim, cdim, nroots;
1697   PetscSF                sfPoint;
1698   ISLocalToGlobalMapping ltogMigration;
1699   ISLocalToGlobalMapping ltogOriginal = NULL;
1700 
1701   PetscFunctionBegin;
1702   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1703   PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1704   PetscCall(PetscObjectGetComm((PetscObject) dm, &comm));
1705   PetscCall(DMGetDimension(dm, &dim));
1706   PetscCall(DMSetDimension(targetDM, dim));
1707   PetscCall(DMGetCoordinateDim(dm, &cdim));
1708   PetscCall(DMSetCoordinateDim(targetDM, cdim));
1709 
1710   /* Check for a one-to-all distribution pattern */
1711   PetscCall(DMGetPointSF(dm, &sfPoint));
1712   PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1713   if (nroots >= 0) {
1714     IS        isOriginal;
1715     PetscInt  n, size, nleaves;
1716     PetscInt  *numbering_orig, *numbering_new;
1717 
1718     /* Get the original point numbering */
1719     PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1720     PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1721     PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1722     PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1723     /* Convert to positive global numbers */
1724     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1725     /* Derive the new local-to-global mapping from the old one */
1726     PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1727     PetscCall(PetscMalloc1(nleaves, &numbering_new));
1728     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1729     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE));
1730     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1731     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig));
1732     PetscCall(ISDestroy(&isOriginal));
1733   } else {
1734     /* One-to-all distribution pattern: We can derive LToG from SF */
1735     PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1736   }
1737   PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1738   PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1739   /* Migrate DM data to target DM */
1740   PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1741   PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1742   PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1743   PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1744   PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1745   PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1746   PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1747   PetscFunctionReturn(0);
1748 }
1749 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Notes:
  If the mesh was not distributed (the communicator has a single rank), the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  /* Nothing to do on a single process: outputs stay NULL */
  if (sf) *sf = NULL;
  *dmParallel = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0));
  /* Create cell partition */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscSectionCreate(comm, &cellPartSection));
  PetscCall(DMPlexGetPartitioner(dm, &partitioner));
  PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
  PetscCall(PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0));
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
    /* Preallocate strata: one stratum per target rank that receives points */
    PetscCall(PetscHSetICreate(&ht));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
    }
    PetscCall(PetscHSetIGetSize(ht, &nranks));
    PetscCall(PetscMalloc1(nranks, &iranks));
    PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
    PetscCall(PetscHSetIDestroy(&ht));
    PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
    PetscCall(PetscFree(iranks));
    /* Inline DMPlexPartitionLabelClosure(): each rank's stratum is the closure of its cells */
    PetscCall(ISGetIndices(cellPart, &points));
    PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
    for (proc = pStart; proc < pEnd; proc++) {
      PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
      if (!npoints) continue;
      PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
      PetscCall(DMPlexClosurePoints_Private(dm, npoints, points+poff, &is));
      PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
      PetscCall(ISDestroy(&is));
    }
    PetscCall(ISRestoreIndices(cellPart, &points));
  }
  PetscCall(PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0));

  /* Invert the partition label into a migration label and build the (stratified) migration SF */
  PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
  PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
  PetscCall(PetscSFDestroy(&sfMigration));
  sfMigration = sfStratified;
  PetscCall(PetscSFSetUp(sfMigration));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg));
  if (flg) {
    PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
    PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  PetscCall(DMPlexCreate(comm, dmParallel));
  PetscCall(PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));

  /* Build the point SF without overlap */
  PetscCall(DMPlexGetPartitionBalance(dm, &balance));
  PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
  PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
  PetscCall(DMSetPointSF(*dmParallel, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  if (flg) PetscCall(PetscSFView(sfPoint, NULL));

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
    PetscCall(DMDestroy(dmParallel));
    *dmParallel = dmOverlap;
    if (flg) {
      PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
      PetscCall(PetscSFView(sfOverlap, NULL));
    }

    /* Re-map the migration SF to establish the full migration pattern */
    PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
    PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
    PetscCall(PetscMalloc1(nleaves, &newRemote));
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      PetscCall(PetscMalloc1(noldleaves, &permRemote));
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE));
    if (oldLeaves) PetscCall(PetscFree(oldRemote)); /* frees permRemote, which oldRemote was pointed at above */
    PetscCall(PetscSFCreate(comm, &sfOverlapPoint));
    PetscCall(PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
    PetscCall(PetscSFDestroy(&sfOverlap));
    PetscCall(PetscSFDestroy(&sfMigration));
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  PetscCall(DMLabelDestroy(&lblPartition));
  PetscCall(DMLabelDestroy(&lblMigration));
  PetscCall(PetscSectionDestroy(&cellPartSection));
  PetscCall(ISDestroy(&cellPart));
  /* Copy discretization */
  PetscCall(DMCopyDisc(dm, *dmParallel));
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    PetscCall(DMGetLocalSection(dm, &section));
    PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
    PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
  }
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
  /* Cleanup: the caller takes ownership of the migration SF if requested */
  if (sf) {*sf = sfMigration;}
  else    PetscCall(PetscSFDestroy(&sfMigration));
  PetscCall(PetscSFDestroy(&sfPoint));
  PetscCall(PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0));
  PetscFunctionReturn(0);
}
1930 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions (the same on all ranks)

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Notes:
  If the mesh was not distributed (the communicator has a single rank), the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Options Database Keys:
+ -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
. -dm_plex_overlap_values <int1,int2,...>   - List of overlap label values
. -dm_plex_overlap_exclude_label <name>     - Label used to exclude points from overlap
- -dm_plex_overlap_exclude_value <int>      - Label value used to exclude points from overlap

  Level: advanced

.seealso: `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  DM_Plex     *mesh = (DM_Plex *) dm->data;
  MPI_Comm     comm;
  PetscMPIInt  size, rank;
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DM           dmCoord;
  DMLabel      lblOverlap;
  PetscSF      sfOverlap, sfStratified, sfPoint;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) PetscFunctionReturn(0);
  {
    // We need to get options for the _already_distributed mesh, so it must be done here
    /* NOTE(review): this local 'overlap' shadows the function argument, and the value updated
       by the options query is discarded when the block ends -- confirm that
       DMSetFromOptions_Overlap_Plex() records everything it needs on the DM itself */
    PetscInt    overlap;
    const char *prefix;
    char        oldPrefix[PETSC_MAX_PATH_LEN];

    /* NOTE(review): the prefix is assumed to fit in PETSC_MAX_PATH_LEN; PetscStrcpy does not bound the copy */
    oldPrefix[0] = '\0';
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject) dm, &prefix));
    PetscCall(PetscStrcpy(oldPrefix, prefix));
    PetscCall(PetscObjectAppendOptionsPrefix((PetscObject) dm, "dist_"));
    PetscCall(DMPlexGetOverlap(dm, &overlap));
    PetscObjectOptionsBegin((PetscObject) dm);
    PetscCall(DMSetFromOptions_Overlap_Plex(PetscOptionsObject, dm, &overlap));
    PetscOptionsEnd();
    /* Restore the original prefix after the temporary "dist_" suffix */
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject) dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
  }
  PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  /* Compute point overlap with neighbouring processes on the distributed DM */
  PetscCall(PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0));
  PetscCall(PetscSectionCreate(comm, &rootSection));
  PetscCall(PetscSectionCreate(comm, &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  /* Overlap is either driven by user-specified labels or by the requested width */
  if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
  /* Convert overlap label to stratified migration SF */
  PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap));
  PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
  PetscCall(PetscSFDestroy(&sfOverlap));
  sfOverlap = sfStratified;
  PetscCall(PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF"));
  PetscCall(PetscSFSetFromOptions(sfOverlap));

  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0));

  /* Build the overlapping DM */
  PetscCall(DMPlexCreate(comm, dmOverlap));
  PetscCall(PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh"));
  PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
  /* Store the overlap in the new DM */
  PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
  /* Build the new point SF; no ownership vote needed since owners are already established */
  PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
  PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  /* Cleanup overlap partition */
  PetscCall(DMLabelDestroy(&lblOverlap));
  if (sf) *sf = sfOverlap;
  else    PetscCall(PetscSFDestroy(&sfOverlap));
  PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
  PetscFunctionReturn(0);
}
2042 
2043 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2044 {
2045   DM_Plex        *mesh  = (DM_Plex*) dm->data;
2046 
2047   PetscFunctionBegin;
2048   *overlap = mesh->overlap;
2049   PetscFunctionReturn(0);
2050 }
2051 
2052 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2053 {
2054   DM_Plex        *mesh=NULL;
2055   DM_Plex        *meshSrc=NULL;
2056 
2057   PetscFunctionBegin;
2058   PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2059   mesh = (DM_Plex*) dm->data;
2060   mesh->overlap = overlap;
2061   if (dmSrc) {
2062     PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2063     meshSrc = (DM_Plex*) dmSrc->data;
2064     mesh->overlap += meshSrc->overlap;
2065   }
2066   PetscFunctionReturn(0);
2067 }
2068 
2069 /*@
2070   DMPlexGetOverlap - Get the width of the cell overlap
2071 
2072   Not collective
2073 
2074   Input Parameter:
2075 . dm   - The DM
2076 
2077   Output Parameter:
2078 . overlap - the width of the cell overlap
2079 
2080   Level: intermediate
2081 
2082 .seealso: `DMPlexSetOverlap()`, `DMPlexDistribute()`
2083 @*/
2084 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2085 {
2086   PetscFunctionBegin;
2087   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2088   PetscValidIntPointer(overlap, 2);
2089   PetscUseMethod(dm, "DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));
2090   PetscFunctionReturn(0);
2091 }
2092 
2093 /*@
2094   DMPlexSetOverlap - Set the width of the cell overlap
2095 
2096   Logically collective
2097 
2098   Input Parameters:
2099 + dm      - The DM
2100 . dmSrc   - The DM that produced this one, or NULL
2101 - overlap - the width of the cell overlap
2102 
2103   Note:
2104   The overlap from dmSrc is added to dm
2105 
2106   Level: intermediate
2107 
2108 .seealso: `DMPlexGetOverlap()`, `DMPlexDistribute()`
2109 @*/
2110 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2111 {
2112   PetscFunctionBegin;
2113   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2114   PetscValidLogicalCollectiveInt(dm, overlap, 3);
2115   PetscTryMethod(dm, "DMPlexSetOverlap_C",(DM,DM,PetscInt),(dm,dmSrc,overlap));
2116   PetscFunctionReturn(0);
2117 }
2118 
2119 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2120 {
2121   DM_Plex *mesh = (DM_Plex *) dm->data;
2122 
2123   PetscFunctionBegin;
2124   mesh->distDefault = dist;
2125   PetscFunctionReturn(0);
2126 }
2127 
/*@
  DMPlexDistributeSetDefault - Set flag indicating whether the DM should be distributed by default

  Logically collective

  Input Parameters:
+ dm   - The DM
- dist - Flag for distribution

  Level: intermediate

.seealso: `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* dist must match on all ranks of dm's communicator */
  PetscValidLogicalCollectiveBool(dm, dist, 2);
  /* No-op if the DM type has not composed a "DMPlexDistributeSetDefault_C" method */
  PetscTryMethod(dm,"DMPlexDistributeSetDefault_C",(DM,PetscBool),(dm,dist));
  PetscFunctionReturn(0);
}
2149 
2150 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2151 {
2152   DM_Plex *mesh = (DM_Plex *) dm->data;
2153 
2154   PetscFunctionBegin;
2155   *dist = mesh->distDefault;
2156   PetscFunctionReturn(0);
2157 }
2158 
/*@
  DMPlexDistributeGetDefault - Get flag indicating whether the DM should be distributed by default

  Not collective

  Input Parameter:
. dm   - The DM

  Output Parameter:
. dist - Flag for distribution

  Level: intermediate

.seealso: `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
@*/
PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidBoolPointer(dist, 2);
  /* Errors if the DM type has not composed a "DMPlexDistributeGetDefault_C" method */
  PetscUseMethod(dm,"DMPlexDistributeGetDefault_C",(DM,PetscBool*),(dm,dist));
  PetscFunctionReturn(0);
}
2182 
2183 /*@C
2184   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2185   root process of the original's communicator.
2186 
2187   Collective on dm
2188 
2189   Input Parameter:
2190 . dm - the original DMPlex object
2191 
2192   Output Parameters:
2193 + sf - the PetscSF used for point distribution (optional)
2194 - gatherMesh - the gathered DM object, or NULL
2195 
2196   Level: intermediate
2197 
2198 .seealso: `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2199 @*/
2200 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2201 {
2202   MPI_Comm       comm;
2203   PetscMPIInt    size;
2204   PetscPartitioner oldPart, gatherPart;
2205 
2206   PetscFunctionBegin;
2207   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2208   PetscValidPointer(gatherMesh,3);
2209   *gatherMesh = NULL;
2210   if (sf) *sf = NULL;
2211   comm = PetscObjectComm((PetscObject)dm);
2212   PetscCallMPI(MPI_Comm_size(comm,&size));
2213   if (size == 1) PetscFunctionReturn(0);
2214   PetscCall(DMPlexGetPartitioner(dm,&oldPart));
2215   PetscCall(PetscObjectReference((PetscObject)oldPart));
2216   PetscCall(PetscPartitionerCreate(comm,&gatherPart));
2217   PetscCall(PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER));
2218   PetscCall(DMPlexSetPartitioner(dm,gatherPart));
2219   PetscCall(DMPlexDistribute(dm,0,sf,gatherMesh));
2220 
2221   PetscCall(DMPlexSetPartitioner(dm,oldPart));
2222   PetscCall(PetscPartitionerDestroy(&gatherPart));
2223   PetscCall(PetscPartitionerDestroy(&oldPart));
2224   PetscFunctionReturn(0);
2225 }
2226 
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Collective on dm

  Input Parameter:
. dm - the original DMPlex object

  Output Parameters:
+ sf - the PetscSF used for point distribution (optional)
- redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.seealso: `DMPlexDistribute()`, `DMPlexGetGatherDM()`
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint, gatherSF;
  DM             gatherDM, dmCoord;
  PetscSFNode    *points;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(redundantMesh,3);
  *redundantMesh = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  PetscCallMPI(MPI_Comm_size(comm,&size));
  /* On a single rank the mesh is already redundant: return dm itself (new reference) */
  if (size == 1) {
    PetscCall(PetscObjectReference((PetscObject) dm));
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(0);
  }
  /* First gather all points onto rank 0 */
  PetscCall(DMPlexGetGatherDM(dm,&gatherSF,&gatherDM));
  if (!gatherDM) PetscFunctionReturn(0);
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* Only rank 0 holds points in gatherDM, so broadcast its point count to all ranks */
  PetscCall(DMPlexGetChart(gatherDM,&pStart,&pEnd));
  numPoints = pEnd - pStart;
  PetscCallMPI(MPI_Bcast(&numPoints,1,MPIU_INT,0,comm));
  PetscCall(PetscMalloc1(numPoints,&points));
  PetscCall(PetscSFCreate(comm,&migrationSF));
  /* Every rank takes a leaf copy of each root point on rank 0 (a broadcast SF) */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  /* nroots is the local chart size of gatherDM (0 off rank 0); SF takes ownership of points */
  PetscCall(PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER));
  PetscCall(DMPlexCreate(comm, redundantMesh));
  PetscCall(PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh"));
  PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
  /* This is to express that all point are in overlap */
  PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_MAX_INT));
  /* Build the point SF of the redundant mesh and share it with the coordinate DM */
  PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
  PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
  PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
  if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
  PetscCall(PetscSFDestroy(&sfPoint));
  if (sf) {
    PetscSF tsf;

    /* Compose gather and broadcast SFs to map original points to redundant points,
       then renumber leaves by stratum to match the redundant mesh ordering */
    PetscCall(PetscSFCompose(gatherSF,migrationSF,&tsf));
    PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
    PetscCall(PetscSFDestroy(&tsf));
  }
  PetscCall(PetscSFDestroy(&migrationSF));
  PetscCall(PetscSFDestroy(&gatherSF));
  PetscCall(DMDestroy(&gatherDM));
  /* Carry over discretization and other metadata from the original DM */
  PetscCall(DMCopyDisc(dm, *redundantMesh));
  PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
  PetscFunctionReturn(0);
}
2302 
2303 /*@
2304   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2305 
2306   Collective
2307 
2308   Input Parameter:
2309 . dm      - The DM object
2310 
2311   Output Parameter:
2312 . distributed - Flag whether the DM is distributed
2313 
2314   Level: intermediate
2315 
2316   Notes:
2317   This currently finds out whether at least two ranks have any DAG points.
2318   This involves MPI_Allreduce() with one integer.
2319   The result is currently not stashed so every call to this routine involves this global communication.
2320 
2321 .seealso: `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2322 @*/
2323 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2324 {
2325   PetscInt          pStart, pEnd, count;
2326   MPI_Comm          comm;
2327   PetscMPIInt       size;
2328 
2329   PetscFunctionBegin;
2330   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2331   PetscValidBoolPointer(distributed,2);
2332   PetscCall(PetscObjectGetComm((PetscObject)dm,&comm));
2333   PetscCallMPI(MPI_Comm_size(comm,&size));
2334   if (size == 1) { *distributed = PETSC_FALSE; PetscFunctionReturn(0); }
2335   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2336   count = (pEnd - pStart) > 0 ? 1 : 0;
2337   PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2338   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2339   PetscFunctionReturn(0);
2340 }
2341