xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 2aa6f3193a4caa00a020b0f715c5eb487db60694)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency() may need to arrange that a large enough array is available for the adjacency, since the callback cannot reallocate it.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
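     A minimal sketch of a callback (the name MyAdjacency and its body are illustrative only). On input adjSize gives the capacity of adj[]; on output it must give the number of entries written:

$      PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
$      {
$        adj[0]   = p;  /* here every point is adjacent only to itself */
$        *adjSize = 1;
$        return 0;
$      }
$      ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr);
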
19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useCone - Flag to use the cone first
63 
64   Level: intermediate
65 
66   Notes:
67 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
68 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
69 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
70 
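     For example, the FVM adjacency above would be selected with (a minimal sketch using the companion setter):

$      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
$      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
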
71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
72 @*/
73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
74 {
75   PetscDS        prob;
76   PetscBool      useClosure;
77   PetscInt       Nf;
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
82   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
83   if (!Nf) {
84     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr);
85     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
86   } else {
87     ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
88     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 /*@
94   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useCone - Flag to use the cone first
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
112 {
113   PetscDS        prob;
114   PetscInt       Nf;
115   PetscErrorCode ierr;
116 
117   PetscFunctionBegin;
118   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
119   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
120   if (!Nf) {
121     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr);
122   } else {
123     ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /*@
129   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
130 
131   Input Parameters:
132 + dm      - The DM object
133 - useClosure - Flag to use the closure
134 
135   Level: intermediate
136 
137   Notes:
138 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
139 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
140 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
141 
142 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
143 @*/
144 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
145 {
146   PetscDS        prob;
147   PetscBool      useCone;
148   PetscInt       Nf;
149   PetscErrorCode ierr;
150 
151   PetscFunctionBegin;
152   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
153   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
154   if (!Nf) {
155     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr);
156     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
157   } else {
158     ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
159     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
160   }
161   PetscFunctionReturn(0);
162 }
163 
164 /*@
165   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
166 
167   Input Parameter:
168 . dm      - The DM object
169 
170   Output Parameter:
171 . useClosure - Flag to use the closure
172 
173   Level: intermediate
174 
175   Notes:
176 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
177 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
178 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
179 
180 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
181 @*/
182 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
183 {
184   PetscDS        prob;
185   PetscInt       Nf;
186   PetscErrorCode ierr;
187 
188   PetscFunctionBegin;
189   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
190   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
191   if (!Nf) {
192     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr);
193   } else {
194     ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
195   }
196   PetscFunctionReturn(0);
197 }
198 
199 /*@
200   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
201 
202   Input Parameters:
203 + dm      - The DM object
204 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
205 
206   Level: intermediate
207 
208 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
209 @*/
210 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
211 {
212   DM_Plex *mesh = (DM_Plex *) dm->data;
213 
214   PetscFunctionBegin;
215   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
216   mesh->useAnchors = useAnchors;
217   PetscFunctionReturn(0);
218 }
219 
220 /*@
221   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
222 
223   Input Parameter:
224 . dm      - The DM object
225 
226   Output Parameter:
227 . useAnchors - Flag indicating whether the constraints are used.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
228 
229   Level: intermediate
230 
231 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
232 @*/
233 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
234 {
235   DM_Plex *mesh = (DM_Plex *) dm->data;
236 
237   PetscFunctionBegin;
238   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
239   PetscValidIntPointer(useAnchors, 2);
240   *useAnchors = mesh->useAnchors;
241   PetscFunctionReturn(0);
242 }
243 
244 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
245 {
246   const PetscInt *cone = NULL;
247   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
248   PetscErrorCode  ierr;
249 
250   PetscFunctionBeginHot;
251   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
252   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
253   for (c = 0; c <= coneSize; ++c) {
254     const PetscInt  point   = !c ? p : cone[c-1];
255     const PetscInt *support = NULL;
256     PetscInt        supportSize, s, q;
257 
258     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
259     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
260     for (s = 0; s < supportSize; ++s) {
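      /* Scan adj[] for support[s]; if the scan reaches numAdj without a match, the comma
         expression in the loop condition appends support[s] and terminates the loop */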
261       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
262         if (support[s] == adj[q]) break;
263       }
264       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
265     }
266   }
267   *adjSize = numAdj;
268   PetscFunctionReturn(0);
269 }
270 
271 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
272 {
273   const PetscInt *support = NULL;
274   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
275   PetscErrorCode  ierr;
276 
277   PetscFunctionBeginHot;
278   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
279   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
280   for (s = 0; s <= supportSize; ++s) {
281     const PetscInt  point = !s ? p : support[s-1];
282     const PetscInt *cone  = NULL;
283     PetscInt        coneSize, c, q;
284 
285     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
286     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
287     for (c = 0; c < coneSize; ++c) {
288       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
289         if (cone[c] == adj[q]) break;
290       }
291       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
292     }
293   }
294   *adjSize = numAdj;
295   PetscFunctionReturn(0);
296 }
297 
298 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
299 {
300   PetscInt      *star = NULL;
301   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
302   PetscErrorCode ierr;
303 
304   PetscFunctionBeginHot;
305   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
306   for (s = 0; s < starSize*2; s += 2) {
307     const PetscInt *closure = NULL;
308     PetscInt        closureSize, c, q;
309 
310     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
311     for (c = 0; c < closureSize*2; c += 2) {
312       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
313         if (closure[c] == adj[q]) break;
314       }
315       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
316     }
317     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
318   }
319   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
320   *adjSize = numAdj;
321   PetscFunctionReturn(0);
322 }
323 
324 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
325 {
326   static PetscInt asiz = 0;
327   PetscInt maxAnchors = 1;
328   PetscInt aStart = -1, aEnd = -1;
329   PetscInt maxAdjSize;
330   PetscSection aSec = NULL;
331   IS aIS = NULL;
332   const PetscInt *anchors;
333   DM_Plex *mesh = (DM_Plex *)dm->data;
334   PetscErrorCode  ierr;
335 
336   PetscFunctionBeginHot;
337   if (useAnchors) {
338     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
339     if (aSec) {
340       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
341       maxAnchors = PetscMax(1,maxAnchors);
342       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
343       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
344     }
345   }
346   if (!*adj) {
347     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
348 
349     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
350     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
351     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
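    /* Bound the adjacency size: a closure contains at most sum_{i=0}^{depth} maxC^i points
       (similarly for stars with maxS), so star(closure(p)) and closure(star(p)) are bounded
       by the products below, later clipped to the chart size */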
352     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
353     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
354     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
355     asiz *= maxAnchors;
356     asiz  = PetscMin(asiz,pEnd-pStart);
357     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
358   }
359   if (*adjSize < 0) *adjSize = asiz;
360   maxAdjSize = *adjSize;
361   if (mesh->useradjacency) {
362     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
363   } else if (useTransitiveClosure) {
364     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
365   } else if (useCone) {
366     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
367   } else {
368     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
369   }
370   if (useAnchors && aSec) {
371     PetscInt origSize = *adjSize;
372     PetscInt numAdj = origSize;
373     PetscInt i = 0, j;
374     PetscInt *orig = *adj;
375 
376     while (i < origSize) {
377       PetscInt p = orig[i];
378       PetscInt aDof = 0;
379 
380       if (p >= aStart && p < aEnd) {
381         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
382       }
383       if (aDof) {
384         PetscInt aOff;
385         PetscInt s, q;
386 
387         for (j = i + 1; j < numAdj; j++) {
388           orig[j - 1] = orig[j];
389         }
390         origSize--;
391         numAdj--;
392         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
393         for (s = 0; s < aDof; ++s) {
394           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
395             if (anchors[aOff+s] == orig[q]) break;
396           }
397           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
398         }
399       } else {
401         i++;
402       }
403     }
404     *adjSize = numAdj;
405     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
406   }
407   PetscFunctionReturn(0);
408 }
409 
410 /*@
411   DMPlexGetAdjacency - Return all points adjacent to the given point
412 
413   Input Parameters:
414 + dm - The DM object
415 . p  - The point
416 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
417 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
418 
419   Output Parameters:
420 + adjSize - The number of adjacent points
421 - adj - The adjacent points
422 
423   Level: advanced
424 
425   Notes:
426     The user must PetscFree the adj array if it was not passed in.
427 
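    Example (a minimal sketch): pass NULL and PETSC_DETERMINE to have the array allocated and sized, then free it,

$      PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;
$      ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$      for (a = 0; a < adjSize; ++a) {ierr = PetscPrintf(PETSC_COMM_SELF, "%D ", adj[a]);CHKERRQ(ierr);}
$      ierr = PetscFree(adj);CHKERRQ(ierr);
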
428 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
429 @*/
430 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
431 {
432   PetscBool      useCone, useClosure, useAnchors;
433   PetscErrorCode ierr;
434 
435   PetscFunctionBeginHot;
436   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
437   PetscValidPointer(adjSize,3);
438   PetscValidPointer(adj,4);
439   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
440   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
441   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
442   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
443   PetscFunctionReturn(0);
444 }
445 
446 /*@
447   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
448 
449   Collective on DM
450 
451   Input Parameters:
452 + dm              - The DM
453 . sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
454 
455   Output Parameters:
456 + processRanks - A list of process neighbors, or NULL
457 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
458 
459   Level: developer
460 
461 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
462 @*/
463 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
464 {
465   const PetscSFNode *remotePoints;
466   PetscInt          *localPointsNew;
467   PetscSFNode       *remotePointsNew;
468   const PetscInt    *nranks;
469   PetscInt          *ranksNew;
470   PetscBT            neighbors;
471   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
472   PetscMPIInt        size, proc, rank;
473   PetscErrorCode     ierr;
474 
475   PetscFunctionBegin;
476   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
477   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
478   if (processRanks) {PetscValidPointer(processRanks, 3);}
479   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
480   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
481   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
482   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
483   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
484   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
485   /* Compute root-to-leaf process connectivity */
486   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
487   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
488   for (p = pStart; p < pEnd; ++p) {
489     PetscInt ndof, noff, n;
490 
491     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
492     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
493     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
494   }
495   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
496   /* Compute leaf-to-neighbor process connectivity */
497   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
498   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
499   for (p = pStart; p < pEnd; ++p) {
500     PetscInt ndof, noff, n;
501 
502     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
503     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
504     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
505   }
506   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
507   /* Compute leaf-to-root process connectivity */
508   for (l = 0; l < numLeaves; ++l) {ierr = PetscBTSet(neighbors, remotePoints[l].rank);CHKERRQ(ierr);}
509   /* Calculate edges */
510   ierr = PetscBTClear(neighbors, rank);CHKERRQ(ierr);
511   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
512   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
513   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
514   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
515   for (proc = 0, n = 0; proc < size; ++proc) {
516     if (PetscBTLookup(neighbors, proc)) {
517       ranksNew[n]              = proc;
518       localPointsNew[n]        = proc;
519       remotePointsNew[n].index = rank;
520       remotePointsNew[n].rank  = proc;
521       ++n;
522     }
523   }
524   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
525   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
526   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
527   if (sfProcess) {
528     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
529     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
530     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
531     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
532   }
533   PetscFunctionReturn(0);
534 }
535 
536 /*@
537   DMPlexDistributeOwnership - Compute owner information for shared points, turning the one-sided information in the point PetscSF into a two-sided description.
538 
539   Collective on DM
540 
541   Input Parameter:
542 . dm - The DM
543 
544   Output Parameters:
545 + rootSection - The number of leaves for a given root point
546 . rootrank    - The rank of each edge into the root point
547 . leafSection - The number of processes sharing a given leaf point
548 - leafrank    - The rank of each process sharing a leaf point
549 
550   Level: developer
551 
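  Example (a sketch of the typical call sequence, as used when constructing an overlap):

$      PetscSection rootSection, leafSection;
$      IS           rootrank, leafrank;
$      DMLabel      ovLabel;
$      ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
$      ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
$      ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
$      ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
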
552 .seealso: DMPlexCreateOverlap()
553 @*/
554 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
555 {
556   MPI_Comm        comm;
557   PetscSF         sfPoint;
558   const PetscInt *rootdegree;
559   PetscInt       *myrank, *remoterank;
560   PetscInt        pStart, pEnd, p, nedges;
561   PetscMPIInt     rank;
562   PetscErrorCode  ierr;
563 
564   PetscFunctionBegin;
565   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
566   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
567   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
568   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
569   /* Compute number of leaves for each root */
570   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
571   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
572   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
573   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
574   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
575   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
576   /* Gather rank of each leaf to root */
577   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
578   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
579   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
580   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
581   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
582   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
583   ierr = PetscFree(myrank);CHKERRQ(ierr);
584   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
585   /* Distribute remote ranks to leaves */
586   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
587   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
588   PetscFunctionReturn(0);
589 }
590 
591 /*@C
592   DMPlexCreateOverlap - Compute the set of overlap points for each process, encoded as a DMLabel of point/rank pairings.
593 
594   Collective on DM
595 
596   Input Parameters:
597 + dm          - The DM
598 . levels      - Number of overlap levels
599 . rootSection - The number of leaves for a given root point
600 . rootrank    - The rank of each edge into the root point
601 . leafSection - The number of processes sharing a given leaf point
602 - leafrank    - The rank of each process sharing a leaf point
603 
604   Output Parameter:
605 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
606 
607   Level: developer
608 
609 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
610 @*/
611 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
612 {
613   MPI_Comm           comm;
614   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
615   PetscSF            sfPoint, sfProc;
616   const PetscSFNode *remote;
617   const PetscInt    *local;
618   const PetscInt    *nrank, *rrank;
619   PetscInt          *adj = NULL;
620   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
621   PetscMPIInt        rank, size;
622   PetscBool          useCone, useClosure, flg;
623   PetscErrorCode     ierr;
624 
625   PetscFunctionBegin;
626   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
627   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
628   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
629   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
630   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
631   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
632   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
633   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
634   /* Handle leaves: shared with the root point */
635   for (l = 0; l < nleaves; ++l) {
636     PetscInt adjSize = PETSC_DETERMINE, a;
637 
638     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
639     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
640   }
641   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
642   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
643   /* Handle roots */
644   for (p = pStart; p < pEnd; ++p) {
645     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
646 
647     if ((p >= sStart) && (p < sEnd)) {
648       /* Some leaves share a root with other leaves on different processes */
649       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
650       if (neighbors) {
651         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
652         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
653         for (n = 0; n < neighbors; ++n) {
654           const PetscInt remoteRank = nrank[noff+n];
655 
656           if (remoteRank == rank) continue;
657           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
658         }
659       }
660     }
661     /* Roots are shared with leaves */
662     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
663     if (!neighbors) continue;
664     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
665     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
666     for (n = 0; n < neighbors; ++n) {
667       const PetscInt remoteRank = rrank[noff+n];
668 
669       if (remoteRank == rank) continue;
670       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
671     }
672   }
673   ierr = PetscFree(adj);CHKERRQ(ierr);
674   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
675   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
676   /* Add additional overlap levels */
677   for (l = 1; l < levels; l++) {
678     /* Propagate point donations over SF to capture remote connections */
679     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
680     /* Add next level of point donations to the label */
681     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
682   }
683   /* We require the closure in the overlap */
684   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
685   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
686   if (useCone || !useClosure) {
687     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
688   }
689   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
690   if (flg) {
691     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
692   }
693   /* Make global process SF and invert sender to receiver label */
694   {
695     /* Build a global process SF */
696     PetscSFNode *remoteProc;
697     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
698     for (p = 0; p < size; ++p) {
699       remoteProc[p].rank  = p;
700       remoteProc[p].index = rank;
701     }
702     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
703     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
704     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
705   }
706   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
707   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
708   /* Add owned points, except for shared local points */
709   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
710   for (l = 0; l < nleaves; ++l) {
711     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
712     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
713   }
714   /* Clean up */
715   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
716   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
720 /*@C
721   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
722 
723   Collective on DM
724 
725   Input Parameters:
726 + dm          - The DM
727 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
728 
729   Output Parameter:
730 . migrationSF - An SF that maps original points in old locations to points in new locations
731 
732   Level: developer
733 
734 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
735 @*/
736 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
737 {
738   MPI_Comm           comm;
739   PetscMPIInt        rank, size;
740   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
741   PetscInt          *pointDepths, *remoteDepths, *ilocal;
742   PetscInt          *depthRecv, *depthShift, *depthIdx;
743   PetscSFNode       *iremote;
744   PetscSF            pointSF;
745   const PetscInt    *sharedLocal;
746   const PetscSFNode *overlapRemote, *sharedRemote;
747   PetscErrorCode     ierr;
748 
749   PetscFunctionBegin;
750   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
751   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
752   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
753   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
754   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
755 
756   /* Before building the migration SF we need to know the new stratum offsets */
757   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
758   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
759   for (d=0; d<dim+1; d++) {
760     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
761     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
762   }
763   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
764   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
765   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
766 
767   /* Count received points in each stratum and compute the internal strata shift */
768   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
769   for (d=0; d<dim+1; d++) depthRecv[d]=0;
770   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
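  /* Received points are appended in canonical stratum order: cells first, then vertices,
     then faces and edges by decreasing depth */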
771   depthShift[dim] = 0;
772   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
773   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
774   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
775   for (d=0; d<dim+1; d++) {
776     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
777     depthIdx[d] = pStart + depthShift[d];
778   }
779 
780   /* From the overlap SF, build an SF that describes the full overlap migration */
781   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
782   newLeaves = pEnd - pStart + nleaves;
783   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
784   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
785   /* First map local points to themselves */
786   for (d=0; d<dim+1; d++) {
787     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
788     for (p=pStart; p<pEnd; p++) {
789       point = p + depthShift[d];
790       ilocal[point] = point;
791       iremote[point].index = p;
792       iremote[point].rank = rank;
793       depthIdx[d]++;
794     }
795   }
796 
797   /* Add in the remote roots for currently shared points */
798   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
799   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
800   for (d=0; d<dim+1; d++) {
801     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
802     for (p=0; p<numSharedPoints; p++) {
803       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
804         point = sharedLocal[p] + depthShift[d];
805         iremote[point].index = sharedRemote[p].index;
806         iremote[point].rank = sharedRemote[p].rank;
807       }
808     }
809   }
810 
811   /* Now add the incoming overlap points */
812   for (p=0; p<nleaves; p++) {
813     point = depthIdx[remoteDepths[p]];
814     ilocal[point] = point;
815     iremote[point].index = overlapRemote[p].index;
816     iremote[point].rank = overlapRemote[p].rank;
817     depthIdx[remoteDepths[p]]++;
818   }
819   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
820 
821   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
822   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
823   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
824   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
825   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
826 
827   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
831 /*@
832   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
833 
834   Input Parameters:
835 + dm          - The DM
836 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
837 
838   Output Parameter:
839 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
840 
841   Level: developer
842 
843 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
844 @*/
845 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
846 {
847   MPI_Comm           comm;
848   PetscMPIInt        rank, size;
849   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
850   PetscInt          *pointDepths, *remoteDepths, *ilocal;
851   PetscInt          *depthRecv, *depthShift, *depthIdx;
852   PetscInt           hybEnd[4];
853   const PetscSFNode *iremote;
854   PetscErrorCode     ierr;
855 
856   PetscFunctionBegin;
857   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
858   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
859   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
860   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
861   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
862   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
863   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
864 
865   /* Before building the migration SF we need to know the new stratum offsets */
866   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
867   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
868   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
869   for (d = 0; d < depth+1; ++d) {
870     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
871     for (p = pStart; p < pEnd; ++p) {
872       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
873         pointDepths[p] = 2 * d;
874       } else {
875         pointDepths[p] = 2 * d + 1;
876       }
877     }
878   }
879   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
880   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
881   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
882   /* Count received points in each stratum and compute the internal strata shift */
883   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
884   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
885   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
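  /* Compute leaf offsets so strata appear in canonical order: regular cells, hybrid cells,
     regular vertices, hybrid vertices, then faces and edges by decreasing depth, with hybrid
     points following the regular points of each stratum */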
886   depthShift[2*depth+1] = 0;
887   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
888   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
889   depthShift[0] += depthRecv[1];
890   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
891   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
892   for (d = 2 * depth-1; d > 2; --d) {
893     PetscInt e;
894 
895     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
896   }
897   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
898   /* Derive a new local permutation based on stratified indices */
899   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
900   for (p = 0; p < nleaves; ++p) {
901     const PetscInt dep = remoteDepths[p];
902 
903     ilocal[p] = depthShift[dep] + depthIdx[dep];
904     depthIdx[dep]++;
905   }
906   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
907   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
908   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
909   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
910   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
913 
914 /*@
915   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
916 
917   Collective on DM
918 
919   Input Parameters:
920 + dm - The DMPlex object
921 . pointSF - The PetscSF describing the communication pattern
922 . originalSection - The PetscSection for existing data layout
923 - originalVec - The existing data
924 
925   Output Parameters:
926 + newSection - The PetscSection describing the new data layout
927 - newVec - The new data
928 
929   Level: developer
930 
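  Example (a sketch; origSection and origVec are placeholders for an existing layout and its data):

$      PetscSection newSection;
$      Vec          newVec;
$      ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
$      ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
$      ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
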
931 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
932 @*/
933 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
934 {
935   PetscSF        fieldSF;
936   PetscInt      *remoteOffsets, fieldSize;
937   PetscScalar   *originalValues, *newValues;
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
942   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
943 
944   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
945   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
946   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
947 
948   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
949   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
950   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
951   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
952   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
953   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
954   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
955   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
956   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
957   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
958   PetscFunctionReturn(0);
959 }
960 
961 /*@
962   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
963 
964   Collective on DM
965 
966   Input Parameters:
967 + dm - The DMPlex object
968 . pointSF - The PetscSF describing the communication pattern
969 . originalSection - The PetscSection for existing data layout
970 - originalIS - The existing data
971 
972   Output Parameters:
973 + newSection - The PetscSection describing the new data layout
974 - newIS - The new data
975 
976   Level: developer
977 
978 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
979 @*/
980 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
981 {
982   PetscSF         fieldSF;
983   PetscInt       *newValues, *remoteOffsets, fieldSize;
984   const PetscInt *originalValues;
985   PetscErrorCode  ierr;
986 
987   PetscFunctionBegin;
988   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
989   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
990 
991   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
992   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
993 
994   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
995   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
996   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
997   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
998   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
999   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1000   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
1001   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
1002   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
1003   PetscFunctionReturn(0);
1004 }
1005 
1006 /*@
1007   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1008 
1009   Collective on DM
1010 
1011   Input Parameters:
1012 + dm - The DMPlex object
1013 . pointSF - The PetscSF describing the communication pattern
1014 . originalSection - The PetscSection for existing data layout
1015 . datatype - The type of data
1016 - originalData - The existing data
1017 
1018   Output Parameters:
1019 + newSection - The PetscSection describing the new data layout
1020 - newData - The new data
1021 
1022   Level: developer
1023 
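  Example (a sketch with integer data; origSection and origData are placeholders, and newData is allocated by this routine and must be freed with PetscFree()):

$      PetscSection newSection;
$      PetscInt    *newData;
$      ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
$      ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
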
1024 .seealso: DMPlexDistribute(), DMPlexDistributeField()
1025 @*/
1026 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1027 {
1028   PetscSF        fieldSF;
1029   PetscInt      *remoteOffsets, fieldSize;
1030   PetscMPIInt    dataSize;
1031   PetscErrorCode ierr;
1032 
1033   PetscFunctionBegin;
1034   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1035   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
1036 
1037   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
1038   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
1039   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
1040 
1041   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
1042   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1043   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1044   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1045   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1046   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1047   PetscFunctionReturn(0);
1048 }
1049 
1050 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1051 {
1052   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1053   MPI_Comm               comm;
1054   PetscSF                coneSF;
1055   PetscSection           originalConeSection, newConeSection;
1056   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1057   PetscBool              flg;
1058   PetscErrorCode         ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1062   PetscValidPointer(dmParallel, 5);
1063   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1064 
1065   /* Distribute cone section */
1066   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1067   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1068   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1069   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1070   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1071   {
1072     PetscInt pStart, pEnd, p;
1073 
1074     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1075     for (p = pStart; p < pEnd; ++p) {
1076       PetscInt coneSize;
1077       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1078       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1079     }
1080   }
1081   /* Communicate and renumber cones */
1082   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1083   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1084   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1085   if (original) {
1086     PetscInt numCones;
1087 
1088     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
1089     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
1090   } else {
1092     globCones = cones;
1093   }
1094   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1095   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1096   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1097   if (original) {
1098     ierr = PetscFree(globCones);CHKERRQ(ierr);
1099   }
1100   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1101   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1102 #if defined(PETSC_USE_DEBUG)
1103   {
1104     PetscInt  p;
1105     PetscBool valid = PETSC_TRUE;
1106     for (p = 0; p < newConesSize; ++p) {
1107       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1108     }
1109     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1110   }
1111 #endif
1112   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1113   if (flg) {
1114     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1115     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1116     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1117     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1118     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1119   }
1120   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1121   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1122   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1123   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1124   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1125   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1126   /* Create supports and stratify DMPlex */
1127   {
1128     PetscInt pStart, pEnd;
1129 
1130     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1131     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1132   }
1133   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1134   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1135   {
1136     PetscBool useCone, useClosure, useAnchors;
1137 
1138     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1139     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1140     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1141     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1142     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1143     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1144   }
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1149 {
1150   MPI_Comm         comm;
1151   PetscSection     originalCoordSection, newCoordSection;
1152   Vec              originalCoordinates, newCoordinates;
1153   PetscInt         bs;
1154   PetscBool        isper;
1155   const char      *name;
1156   const PetscReal *maxCell, *L;
1157   const DMBoundaryType *bd;
1158   PetscErrorCode   ierr;
1159 
1160   PetscFunctionBegin;
1161   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1162   PetscValidPointer(dmParallel, 3);
1163 
1164   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1165   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1166   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1167   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1168   if (originalCoordinates) {
1169     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1170     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1171     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1172 
1173     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1174     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1175     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1176     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1177     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1178   }
1179   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1180   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1181   PetscFunctionReturn(0);
1182 }
1183 
1184 /* Here we are assuming that process 0 always has everything */
1185 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1186 {
1187   DM_Plex         *mesh = (DM_Plex*) dm->data;
1188   MPI_Comm         comm;
1189   DMLabel          depthLabel;
1190   PetscMPIInt      rank;
1191   PetscInt         depth, d, numLabels, numLocalLabels, l;
1192   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1193   PetscObjectState depthState = -1;
1194   PetscErrorCode   ierr;
1195 
1196   PetscFunctionBegin;
1197   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1198   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1199   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1200   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1201   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1202 
1203   /* If the user has changed the depth label, communicate it instead */
1204   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1205   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1206   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1207   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1208   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1209   if (sendDepth) {
1210     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1211     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1212   }
1213   /* Everyone must have either the same number of labels, or none */
1214   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1215   numLabels = numLocalLabels;
1216   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1217   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1218   for (l = numLabels-1; l >= 0; --l) {
1219     DMLabel     label = NULL, labelNew = NULL;
1220     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1221 
1222     if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);}
1223     /* Skip "depth" because it is recreated */
1224     if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);}
1225     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1226     if (isDepth && !sendDepth) continue;
1227     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1228     if (isDepth) {
1229       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1230       PetscInt gdepth;
1231 
1232       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1233       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", depth, gdepth);
1234       for (d = 0; d <= gdepth; ++d) {
1235         PetscBool has;
1236 
1237         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1238         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1239       }
1240     }
1241     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1242     /* Put the output flag in the new label */
1243     if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);}
1244     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1245     ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr);
1246   }
1247   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1248   PetscFunctionReturn(0);
1249 }
1250 
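/* Mark the hybrid points of the serial mesh, push the flags through the migration SF, and record
   the first flagged point of each stratum as the parallel hybrid bound */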
1251 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1252 {
1253   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1254   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1255   PetscBool      *isHybrid, *isHybridParallel;
1256   PetscInt        dim, depth, d;
1257   PetscInt        pStart, pEnd, pStartP, pEndP;
1258   PetscErrorCode  ierr;
1259 
1260   PetscFunctionBegin;
1261   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1262   PetscValidPointer(dmParallel, 4);
1263 
1264   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1265   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1266   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1267   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1268   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
1269   for (d = 0; d <= depth; d++) {
1270     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1271 
1272     if (hybridMax >= 0) {
1273       PetscInt sStart, sEnd, p;
1274 
1275       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1276       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1277     }
1278   }
1279   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1280   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1281   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1282   for (d = 0; d <= depth; d++) {
1283     PetscInt sStart, sEnd, p, dd;
1284 
1285     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1286     dd = (depth == 1 && d == 1) ? dim : d;
1287     for (p = sStart; p < sEnd; p++) {
1288       if (isHybridParallel[p-pStartP]) {
1289         pmesh->hybridPointMax[dd] = p;
1290         break;
1291       }
1292     }
1293   }
1294   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1295   PetscFunctionReturn(0);
1296 }
1297 
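/* Distribute the parent/child (tree) information: the parent section is migrated with a section SF,
   parent points are translated into global numbers for the broadcast, then mapped back into the new
   local numbering */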
1298 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1299 {
1300   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1301   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1302   MPI_Comm        comm;
1303   DM              refTree;
1304   PetscSection    origParentSection, newParentSection;
1305   PetscInt        *origParents, *origChildIDs;
1306   PetscBool       flg;
1307   PetscErrorCode  ierr;
1308 
1309   PetscFunctionBegin;
1310   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1311   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1312   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1313 
1314   /* Set up tree */
1315   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1316   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1317   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1318   if (origParentSection) {
1319     PetscInt        pStart, pEnd;
1320     PetscInt        *newParents, *newChildIDs, *globParents;
1321     PetscInt        *remoteOffsetsParents, newParentSize;
1322     PetscSF         parentSF;
1323 
1324     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1325     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1326     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1327     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1328     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1329     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1330     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1331     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1332     if (original) {
1333       PetscInt numParents;
1334 
1335       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1336       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1337       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1338     }
1339     else {
1340       globParents = origParents;
1341     }
1342     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1343     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1344     if (original) {
1345       ierr = PetscFree(globParents);CHKERRQ(ierr);
1346     }
1347     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1348     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1349     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1350 #if PETSC_USE_DEBUG
1351     {
1352       PetscInt  p;
1353       PetscBool valid = PETSC_TRUE;
1354       for (p = 0; p < newParentSize; ++p) {
1355         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1356       }
1357       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1358     }
1359 #endif
1360     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1361     if (flg) {
1362       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1363       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1364       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1365       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1366       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1367     }
1368     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1369     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1370     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1371     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1372   }
1373   pmesh->useAnchors = mesh->useAnchors;
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1378 {
1379   PetscMPIInt            rank, size;
1380   MPI_Comm               comm;
1381   PetscErrorCode         ierr;
1382 
1383   PetscFunctionBegin;
1384   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1385   PetscValidPointer(dmParallel,3);
1386 
1387   /* Create point SF for parallel mesh */
1388   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1389   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1390   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1391   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1392   {
1393     const PetscInt *leaves;
1394     PetscSFNode    *remotePoints, *rowners, *lowners;
1395     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1396     PetscInt        pStart, pEnd;
1397 
1398     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1399     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1400     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1401     for (p=0; p<numRoots; p++) {
1402       rowners[p].rank  = -1;
1403       rowners[p].index = -1;
1404     }
1405     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1406     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1407     for (p = 0; p < numLeaves; ++p) {
1408       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1409         lowners[p].rank  = rank;
1410         lowners[p].index = leaves ? leaves[p] : p;
1411       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1412         lowners[p].rank  = -2;
1413         lowners[p].index = -2;
1414       }
1415     }
1416     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1417       rowners[p].rank  = -3;
1418       rowners[p].index = -3;
1419     }
1420     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1421     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1422     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1423     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1424     for (p = 0; p < numLeaves; ++p) {
1425       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1426       if (lowners[p].rank != rank) ++numGhostPoints;
1427     }
1428     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1429     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1430     for (p = 0, gp = 0; p < numLeaves; ++p) {
1431       if (lowners[p].rank != rank) {
1432         ghostPoints[gp]        = leaves ? leaves[p] : p;
1433         remotePoints[gp].rank  = lowners[p].rank;
1434         remotePoints[gp].index = lowners[p].index;
1435         ++gp;
1436       }
1437     }
1438     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1439     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1440     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1441   }
1442   {
1443     PetscBool useCone, useClosure, useAnchors;
1444 
1445     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1446     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1447     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1448     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1449     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1450     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1451   }
1452   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1453   PetscFunctionReturn(0);
1454 }
1455 
1456 /*@C
1457   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1458 
1459   Input Parameters:
1460 + dm          - The source DMPlex object
1461 . migrationSF - The star forest that describes the parallel point remapping
1462 - ownership   - Flag causing a vote to determine point ownership
1463 
1464   Output Parameter:
1465 . pointSF     - The star forest describing the point overlap in the remapped DM
1466 
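  Notes:
  A minimal usage sketch, assuming dmParallel and migrationSF come from a prior distribution step
  (error checking abbreviated):
$    PetscSF pointSF;
$    ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);
$    ierr = DMSetPointSF(dmParallel, pointSF);
$    ierr = PetscSFDestroy(&pointSF);
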
1467   Level: developer
1468 
1469 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1470 @*/
1471 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1472 {
1473   DM_Plex           *mesh = (DM_Plex *) dm->data;
1474   PetscMPIInt        rank, size;
1475   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1476   PetscInt          *pointLocal;
1477   const PetscInt    *leaves;
1478   const PetscSFNode *roots;
1479   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1480   Vec                shifts;
1481   const PetscInt     numShifts = 37; /* TODO Use larger prime */
1482   const PetscScalar *shift = NULL;
1483   const PetscBool    shiftDebug = PETSC_FALSE;
1484   PetscErrorCode     ierr;
1485 
1486   PetscFunctionBegin;
1487   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1488   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1489   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1490 
1491   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1492   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1493   if (ownership) {
1494     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
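    /* Concretely, each leaf below bids with owner(p) = (rank + shift[index(p) % numShifts]) % size,
       so the MPI_MAXLOC winner varies with the point index instead of always being the largest rank */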
1495     if (mesh->partitionBalance) {
1496       PetscRandom r;
1497 
1498       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1499       ierr = PetscRandomSetInterval(r, 0, 17*size);CHKERRQ(ierr);
1500       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1501       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1502       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1503       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1504       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1505       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1506     }
1507 
1508     /* Point ownership vote: Process with highest rank owns shared points */
1509     for (p = 0; p < nleaves; ++p) {
1510       if (shiftDebug) {
1511         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, shift ? (PetscInt) shift[roots[p].index%numShifts] : 0, (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size);CHKERRQ(ierr);
1512       }
1513       /* Either put in a bid or we know we own it */
1514       leafNodes[p].rank  = (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size;
1515       leafNodes[p].index = p;
1516     }
1517     for (p = 0; p < nroots; p++) {
1518       /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1519       rootNodes[p].rank  = -3;
1520       rootNodes[p].index = -3;
1521     }
1522     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1523     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1524   } else {
1525     for (p = 0; p < nroots; p++) {
1526       rootNodes[p].index = -1;
1527       rootNodes[p].rank = rank;
1528     }
1529     for (p = 0; p < nleaves; p++) {
1530       /* Write new local id into old location */
1531       if (roots[p].rank == rank) {
1532         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1533       }
1534     }
1535   }
1536   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1537   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1538 
1539   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1540     if (shiftDebug) {
1541       ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Root %D, Rank %D MyRank %D\n", rank, roots[p].index, leafNodes[p].rank, (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size);CHKERRQ(ierr);
1542     }
1543     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size) npointLeaves++;
1544   }
1545   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1546   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1547   for (idx = 0, p = 0; p < nleaves; p++) {
1548     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size) {
1549       pointLocal[idx] = p;
1550       pointRemote[idx] = leafNodes[p];
1551       idx++;
1552     }
1553   }
1554   if (shift) {
1555     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1556     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1557   }
1558   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1559   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1560   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1561   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1562   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1563   PetscFunctionReturn(0);
1564 }
1565 
1566 /*@C
1567   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1568 
1569   Input Parameters:
1570 + dm       - The source DMPlex object
1571 - sf       - The star forest communication context describing the migration pattern
1572 
1573   Output Parameter:
1574 . targetDM - The target DMPlex object
1575 
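  Notes:
  A minimal usage sketch, assuming sf maps the points of dm onto targetDM (as produced by, e.g.,
  DMPlexPartitionLabelCreateSF()); error checking abbreviated:
$    DM targetDM;
$    ierr = DMPlexCreate(comm, &targetDM);
$    ierr = DMPlexMigrate(dm, sf, targetDM);
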
1576   Level: intermediate
1577 
1578 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1579 @*/
1580 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1581 {
1582   MPI_Comm               comm;
1583   PetscInt               dim, nroots;
1584   PetscSF                sfPoint;
1585   ISLocalToGlobalMapping ltogMigration;
1586   ISLocalToGlobalMapping ltogOriginal = NULL;
1587   PetscBool              flg;
1588   PetscErrorCode         ierr;
1589 
1590   PetscFunctionBegin;
1591   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1592   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1593   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1594   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1595   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1596 
1597   /* Check for a one-to-all distribution pattern */
1598   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1599   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1600   if (nroots >= 0) {
1601     IS                     isOriginal;
1602     PetscInt               n, size, nleaves;
1603     PetscInt              *numbering_orig, *numbering_new;
1604     /* Get the original point numbering */
1605     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1606     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1607     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1608     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1609     /* Convert to positive global numbers: the point numbering encodes ghost points as -(global+1) */
1610     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1611     /* Derive the new local-to-global mapping from the old one */
1612     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1613     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1614     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1615     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1616     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1617     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1618     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1619   } else {
1620     /* One-to-all distribution pattern: We can derive LToG from SF */
1621     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1622   }
1623   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1624   if (flg) {
1625     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1626     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1627   }
1628   /* Migrate DM data to target DM */
1629   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1630   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1631   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1632   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1633   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1634   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1635   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1636   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1637   PetscFunctionReturn(0);
1638 }
1639 
1640 /*@C
1641   DMPlexDistribute - Distributes the mesh and any associated sections.
1642 
1643   Collective on dm
1644 
1645   Input Parameters:
1646 + dm  - The original DMPlex object
1647 - overlap - The overlap of partitions; 0 (the default) means no overlap
1648 
1649   Output Parameters:
1650 + sf - The PetscSF used for point distribution
1651 - dmParallel - The distributed DMPlex object, or NULL
1652 
1653   Notes: If the mesh was not distributed, dmParallel is set to NULL.
1654 
1655   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1656   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1657   representation on the mesh.
1658 
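  Example usage, a minimal sketch that replaces a serial mesh with its distributed counterpart
  (error checking abbreviated):
$    DM      dmDist = NULL;
$    PetscSF sf     = NULL;
$    ierr = DMPlexDistribute(dm, 0, &sf, &dmDist);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);
$      dm   = dmDist;
$    }
$    ierr = PetscSFDestroy(&sf);
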
1659   Level: intermediate
1660 
1661 .keywords: mesh, elements
1662 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1663 @*/
1664 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1665 {
1666   MPI_Comm               comm;
1667   PetscPartitioner       partitioner;
1668   IS                     cellPart;
1669   PetscSection           cellPartSection;
1670   DM                     dmCoord;
1671   DMLabel                lblPartition, lblMigration;
1672   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1673   PetscBool              flg;
1674   PetscMPIInt            rank, size, p;
1675   PetscErrorCode         ierr;
1676 
1677   PetscFunctionBegin;
1678   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1679   if (sf) PetscValidPointer(sf,4);
1680   PetscValidPointer(dmParallel,5);
1681 
1682   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1683   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1684   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1685   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1686 
1687   if (sf) *sf = NULL;
1688   *dmParallel = NULL;
1689   if (size == 1) {
1690     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1691     PetscFunctionReturn(0);
1692   }
1693 
1694   /* Create cell partition */
1695   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1696   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1697   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1698   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1699   {
1700     /* Convert partition to DMLabel */
1701     PetscInt proc, pStart, pEnd, npoints, poffset;
1702     const PetscInt *points;
1703     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1704     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1705     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1706     for (proc = pStart; proc < pEnd; proc++) {
1707       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1708       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1709       for (p = poffset; p < poffset+npoints; p++) {
1710         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1711       }
1712     }
1713     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1714   }
1715   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1716   {
1717     /* Build a global process SF */
1718     PetscSFNode *remoteProc;
1719     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1720     for (p = 0; p < size; ++p) {
1721       remoteProc[p].rank  = p;
1722       remoteProc[p].index = rank;
1723     }
1724     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1725     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1726     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1727   }
1728   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1729   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1730   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1731   /* Stratify the SF in case we are migrating an already parallel plex */
1732   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1733   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1734   sfMigration = sfStratified;
1735   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1736   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1737   if (flg) {
1738     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1739     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1740   }
1741 
1742   /* Create non-overlapping parallel DM and migrate internal data */
1743   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1744   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1745   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1746 
1747   /* Build the point SF without overlap */
1748   ((DM_Plex*) (*dmParallel)->data)->partitionBalance = ((DM_Plex*) dm->data)->partitionBalance;
1749   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1750   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1751   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1752   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1753   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1754 
1755   if (overlap > 0) {
1756     DM                 dmOverlap;
1757     PetscInt           nroots, nleaves;
1758     PetscSFNode       *newRemote;
1759     const PetscSFNode *oldRemote;
1760     PetscSF            sfOverlap, sfOverlapPoint;
1761     /* Add the partition overlap to the distributed DM */
1762     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1763     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1764     *dmParallel = dmOverlap;
1765     if (flg) {
1766       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1767       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1768     }
1769 
1770     /* Re-map the migration SF to establish the full migration pattern */
1771     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1772     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1773     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1774     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1775     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1776     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1777     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1778     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1779     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1780     sfMigration = sfOverlapPoint;
1781   }
1782   /* Cleanup Partition */
1783   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1784   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1785   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1786   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1787   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1788   /* Copy BC */
1789   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1790   /* Create sfNatural */
1791   if (dm->useNatural) {
1792     PetscSection section;
1793 
1794     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1795     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1796     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1797   }
1798   /* Cleanup */
1799   if (sf) {*sf = sfMigration;}
1800   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1801   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1802   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1803   PetscFunctionReturn(0);
1804 }
1805 
1806 /*@C
1807   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1808 
1809   Collective on dm
1810 
1811   Input Parameters:
1812 + dm  - The non-overlapping distributed DMPlex object
1813 - overlap - The overlap of partitions; 0 (the default) means no overlap
1814 
1815   Output Parameters:
1816 + sf - The PetscSF used for point distribution
1817 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1818 
1819   Notes: If the mesh is run on a single process, dmOverlap is set to NULL.
1820 
1821   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1822   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1823   representation on the mesh.
1824 
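  Example usage, a minimal sketch that adds one level of overlap to an already distributed mesh
  (error checking abbreviated):
$    DM      dmOverlap = NULL;
$    PetscSF sfOverlap = NULL;
$    ierr = DMPlexDistributeOverlap(dm, 1, &sfOverlap, &dmOverlap);
$    if (dmOverlap) {
$      ierr = DMDestroy(&dm);
$      dm   = dmOverlap;
$      ierr = PetscSFDestroy(&sfOverlap);
$    }
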
1825   Level: intermediate
1826 
1827 .keywords: mesh, elements
1828 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1829 @*/
1830 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1831 {
1832   MPI_Comm               comm;
1833   PetscMPIInt            size, rank;
1834   PetscSection           rootSection, leafSection;
1835   IS                     rootrank, leafrank;
1836   DM                     dmCoord;
1837   DMLabel                lblOverlap;
1838   PetscSF                sfOverlap, sfStratified, sfPoint;
1839   PetscErrorCode         ierr;
1840 
1841   PetscFunctionBegin;
1842   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1843   if (sf) PetscValidPointer(sf, 3);
1844   PetscValidPointer(dmOverlap, 4);
1845 
1846   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1847   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1848   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1849   if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
1850   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1851 
1852   /* Compute point overlap with neighbouring processes on the distributed DM */
1853   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1854   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1855   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1856   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1857   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1858   /* Convert overlap label to stratified migration SF */
1859   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1860   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1861   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1862   sfOverlap = sfStratified;
1863   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1864   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1865 
1866   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1867   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1868   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1869   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1870   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1871 
1872   /* Build the overlapping DM */
1873   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1874   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1875   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1876   /* Build the new point SF */
1877   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1878   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1879   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1880   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1881   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1882   /* Cleanup overlap partition */
1883   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1884   if (sf) *sf = sfOverlap;
1885   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1886   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1887   PetscFunctionReturn(0);
1888 }
1889 
1890 /*@C
1891   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1892   root process of the original's communicator.
1893 
1894   Input Parameter:
1895 . dm - the original DMPlex object
1896 
1897   Output Parameter:
1898 . gatherMesh - the gathered DM object, or NULL
1899 
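  Notes:
  A minimal usage sketch; the gathered mesh exists on all ranks of the communicator, but only rank 0
  holds points (error checking abbreviated):
$    DM gatherMesh;
$    ierr = DMPlexGetGatherDM(dm, &gatherMesh);
$    if (gatherMesh) {ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);}
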
1900   Level: intermediate
1901 
1902 .keywords: mesh
1903 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1904 @*/
1905 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1906 {
1907   MPI_Comm       comm;
1908   PetscMPIInt    size;
1909   PetscPartitioner oldPart, gatherPart;
1910   PetscErrorCode ierr;
1911 
1912   PetscFunctionBegin;
1913   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1914   comm = PetscObjectComm((PetscObject)dm);
1915   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1916   *gatherMesh = NULL;
1917   if (size == 1) PetscFunctionReturn(0);
1918   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1919   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1920   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1921   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1922   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1923   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1924   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1925   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1926   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1927   PetscFunctionReturn(0);
1928 }
1929 
1930 /*@C
1931   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1932 
1933   Input Parameter:
1934 . dm - the original DMPlex object
1935 
1936   Output Parameter:
1937 . redundantMesh - the redundant DM object, or NULL
1938 
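  Notes:
  A minimal usage sketch; every rank receives a complete copy of the mesh (error checking abbreviated):
$    DM redundantMesh;
$    ierr = DMPlexGetRedundantDM(dm, &redundantMesh);
$    if (redundantMesh) {ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);}
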
1939   Level: intermediate
1940 
1941 .keywords: mesh
1942 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1943 @*/
1944 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1945 {
1946   MPI_Comm       comm;
1947   PetscMPIInt    size, rank;
1948   PetscInt       pStart, pEnd, p;
1949   PetscInt       numPoints = -1;
1950   PetscSF        migrationSF, sfPoint;
1951   DM             gatherDM, dmCoord;
1952   PetscSFNode    *points;
1953   PetscErrorCode ierr;
1954 
1955   PetscFunctionBegin;
1956   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1957   comm = PetscObjectComm((PetscObject)dm);
1958   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1959   *redundantMesh = NULL;
1960   if (size == 1) {
1961     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1962     *redundantMesh = dm;
1963     PetscFunctionReturn(0);
1964   }
1965   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1966   if (!gatherDM) PetscFunctionReturn(0);
1967   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1968   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1969   numPoints = pEnd - pStart;
1970   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1971   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1972   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1973   for (p = 0; p < numPoints; p++) {
1974     points[p].index = p;
1975     points[p].rank  = 0;
1976   }
1977   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1978   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1979   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1980   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1981   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1982   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1983   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1984   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1985   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1986   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1987   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1988   PetscFunctionReturn(0);
1989 }
1990