xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision cda239d9a8fbe2b07e398fe4fef996ce7a8792ed)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency() may need to supply an adjacency array large enough for the adjacency computed by this callback.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
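/* A minimal sketch (illustrative only, not part of this source) of a conforming
   callback and its registration; MyAdjacency and its each-point-is-adjacent-
   only-to-itself rule are invented for the example.  On entry *adjSize holds
   the capacity of adj[]; on exit it holds the number of entries written.

     static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
     {
       PetscFunctionBegin;
       if (*adjSize < 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
       adj[0]   = p;
       *adjSize = 1;
       PetscFunctionReturn(0);
     }

     ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr);
*/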
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useCone - Flag to use the cone first
63 
64   Level: intermediate
65 
66   Notes:
67 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
68 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
69 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
70 
71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
72 @*/
73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
74 {
75   PetscDS        prob;
76   PetscBool      useClosure;
77   PetscInt       Nf;
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
82   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
83   if (!Nf) {
84     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr);
85     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
86   } else {
87     ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
88     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 /*@
94   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useCone - Flag to use the cone first
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
112 {
113   PetscDS        prob;
114   PetscInt       Nf;
115   PetscErrorCode ierr;
116 
117   PetscFunctionBegin;
118   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
119   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
120   if (!Nf) {
121     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr);
122   } else {
123     ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /*@
129   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
130 
131   Input Parameters:
132 + dm      - The DM object
133 - useClosure - Flag to use the closure
134 
135   Level: intermediate
136 
137   Notes:
138 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
139 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
140 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
141 
142 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
143 @*/
144 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
145 {
146   PetscDS        prob;
147   PetscBool      useCone;
148   PetscInt       Nf;
149   PetscErrorCode ierr;
150 
151   PetscFunctionBegin;
152   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
153   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
154   if (!Nf) {
155     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr);
156     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
157   } else {
158     ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
159     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
160   }
161   PetscFunctionReturn(0);
162 }
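/* A sketch of selecting the FVM-style adjacency from the tables above
   (useCone = PETSC_TRUE, useClosure = PETSC_FALSE), e.g. before preallocation
   or distribution:

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
*/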
163 
164 /*@
165   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
166 
167   Input Parameter:
168 . dm      - The DM object
169 
170   Output Parameter:
171 . useClosure - Flag to use the closure
172 
173   Level: intermediate
174 
175   Notes:
176 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
177 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
178 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
179 
180 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
181 @*/
182 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
183 {
184   PetscDS        prob;
185   PetscInt       Nf;
186   PetscErrorCode ierr;
187 
188   PetscFunctionBegin;
189   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
190   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
191   if (!Nf) {
192     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr);
193   } else {
194     ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
195   }
196   PetscFunctionReturn(0);
197 }
198 
199 /*@
200   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
201 
202   Input Parameters:
203 + dm      - The DM object
204 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
205 
206   Level: intermediate
207 
208 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
209 @*/
210 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
211 {
212   DM_Plex *mesh = (DM_Plex *) dm->data;
213 
214   PetscFunctionBegin;
215   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
216   mesh->useAnchors = useAnchors;
217   PetscFunctionReturn(0);
218 }
219 
220 /*@
221   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
222 
223   Input Parameter:
224 . dm      - The DM object
225 
226   Output Parameter:
227 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
228 
229   Level: intermediate
230 
231 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
232 @*/
233 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
234 {
235   DM_Plex *mesh = (DM_Plex *) dm->data;
236 
237   PetscFunctionBegin;
238   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
239   PetscValidIntPointer(useAnchors, 2);
240   *useAnchors = mesh->useAnchors;
241   PetscFunctionReturn(0);
242 }
243 
244 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
245 {
246   const PetscInt *cone = NULL;
247   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
248   PetscErrorCode  ierr;
249 
250   PetscFunctionBeginHot;
251   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
252   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
253   for (c = 0; c <= coneSize; ++c) {
254     const PetscInt  point   = !c ? p : cone[c-1];
255     const PetscInt *support = NULL;
256     PetscInt        supportSize, s, q;
257 
258     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
259     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
260     for (s = 0; s < supportSize; ++s) {
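      /* Deduplicating insert: if support[s] is not found among adj[0..numAdj), the
         comma expression in the loop condition appends it and ends the scan; the
         same idiom recurs in the helpers below */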
261       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
262         if (support[s] == adj[q]) break;
263       }
264       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency size exceeded allocation %D", maxAdjSize);
265     }
266   }
267   *adjSize = numAdj;
268   PetscFunctionReturn(0);
269 }
270 
271 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
272 {
273   const PetscInt *support = NULL;
274   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
275   PetscErrorCode  ierr;
276 
277   PetscFunctionBeginHot;
278   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
279   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
280   for (s = 0; s <= supportSize; ++s) {
281     const PetscInt  point = !s ? p : support[s-1];
282     const PetscInt *cone  = NULL;
283     PetscInt        coneSize, c, q;
284 
285     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
286     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
287     for (c = 0; c < coneSize; ++c) {
288       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
289         if (cone[c] == adj[q]) break;
290       }
291       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency size exceeded allocation %D", maxAdjSize);
292     }
293   }
294   *adjSize = numAdj;
295   PetscFunctionReturn(0);
296 }
297 
298 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
299 {
300   PetscInt      *star = NULL;
301   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
302   PetscErrorCode ierr;
303 
304   PetscFunctionBeginHot;
305   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
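  /* Transitive closure arrays store (point, orientation) pairs, hence the stride of 2 */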
306   for (s = 0; s < starSize*2; s += 2) {
307     const PetscInt *closure = NULL;
308     PetscInt        closureSize, c, q;
309 
310     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
311     for (c = 0; c < closureSize*2; c += 2) {
312       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
313         if (closure[c] == adj[q]) break;
314       }
315       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency size exceeded allocation %D", maxAdjSize);
316     }
317     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
318   }
319   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
320   *adjSize = numAdj;
321   PetscFunctionReturn(0);
322 }
323 
324 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
325 {
326   static PetscInt asiz = 0;
327   PetscInt maxAnchors = 1;
328   PetscInt aStart = -1, aEnd = -1;
329   PetscInt maxAdjSize;
330   PetscSection aSec = NULL;
331   IS aIS = NULL;
332   const PetscInt *anchors;
333   DM_Plex *mesh = (DM_Plex *)dm->data;
334   PetscErrorCode  ierr;
335 
336   PetscFunctionBeginHot;
337   if (useAnchors) {
338     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
339     if (aSec) {
340       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
341       maxAnchors = PetscMax(1,maxAnchors);
342       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
343       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
344     }
345   }
346   if (!*adj) {
347     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
348 
349     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
350     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
351     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
352     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
353     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
354     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
355     asiz *= maxAnchors;
356     asiz  = PetscMin(asiz,pEnd-pStart);
357     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
358   }
359   if (*adjSize < 0) *adjSize = asiz;
360   maxAdjSize = *adjSize;
361   if (mesh->useradjacency) {
362     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
363   } else if (useTransitiveClosure) {
364     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
365   } else if (useCone) {
366     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
367   } else {
368     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
369   }
370   if (useAnchors && aSec) {
371     PetscInt origSize = *adjSize;
372     PetscInt numAdj = origSize;
373     PetscInt i = 0, j;
374     PetscInt *orig = *adj;
375 
376     while (i < origSize) {
377       PetscInt p = orig[i];
378       PetscInt aDof = 0;
379 
380       if (p >= aStart && p < aEnd) {
381         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
382       }
383       if (aDof) {
384         PetscInt aOff;
385         PetscInt s, q;
386 
387         for (j = i + 1; j < numAdj; j++) {
388           orig[j - 1] = orig[j];
389         }
390         origSize--;
391         numAdj--;
392         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
393         for (s = 0; s < aDof; ++s) {
394           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
395             if (anchors[aOff+s] == orig[q]) break;
396           }
397           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency size exceeded allocation %D", maxAdjSize);
398         }
399       }
400       else {
401         i++;
402       }
403     }
404     *adjSize = numAdj;
405     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
406   }
407   PetscFunctionReturn(0);
408 }
409 
410 /*@
411   DMPlexGetAdjacency - Return all points adjacent to the given point
412 
413   Input Parameters:
414 + dm - The DM object
415 . p  - The point
416 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
417 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
418 
419   Output Parameters:
420 + adjSize - The number of adjacent points
421 - adj - The adjacent points
422 
423   Level: advanced
424 
425   Notes:
426     The user must PetscFree() the adj array if it was not passed in.
427 
428 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
429 @*/
430 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
431 {
432   PetscBool      useCone, useClosure, useAnchors;
433   PetscErrorCode ierr;
434 
435   PetscFunctionBeginHot;
436   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
437   PetscValidPointer(adjSize,3);
438   PetscValidPointer(adj,4);
439   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
440   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
441   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
442   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
443   PetscFunctionReturn(0);
444 }
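/* Typical usage, as a sketch (cf. DMPlexCreateOverlap() below): pass NULL and
   PETSC_DETERMINE so the routine allocates the array, reuse it across points if
   desired, and free it once.  visitPoint() is a placeholder for caller code.

     PetscInt  adjSize = PETSC_DETERMINE, a;
     PetscInt *adj     = NULL;

     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
     for (a = 0; a < adjSize; ++a) visitPoint(adj[a]);
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/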
445 
446 /*@
447   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
448 
449   Collective on DM
450 
451   Input Parameters:
452 + dm      - The DM
453 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
454 
455   Output Parameters:
456 + processRanks - A list of process neighbors, or NULL
457 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
458 
459   Level: developer
460 
461 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
462 @*/
463 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
464 {
465   const PetscSFNode *remotePoints;
466   PetscInt          *localPointsNew;
467   PetscSFNode       *remotePointsNew;
468   const PetscInt    *nranks;
469   PetscInt          *ranksNew;
470   PetscBT            neighbors;
471   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
472   PetscMPIInt        size, proc, rank;
473   PetscErrorCode     ierr;
474 
475   PetscFunctionBegin;
476   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
477   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
478   if (processRanks) {PetscValidPointer(processRanks, 7);}
479   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
480   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
481   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
482   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
483   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
484   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
485   /* Compute root-to-leaf process connectivity */
486   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
487   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
488   for (p = pStart; p < pEnd; ++p) {
489     PetscInt ndof, noff, n;
490 
491     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
492     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
493     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
494   }
495   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
496   /* Compute leaf-to-neighbor process connectivity */
497   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
498   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
499   for (p = pStart; p < pEnd; ++p) {
500     PetscInt ndof, noff, n;
501 
502     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
503     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
504     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
505   }
506   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
507   /* Compute leaf-to-root process connectivity */
508   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
509   /* Calculate edges */
510   PetscBTClear(neighbors, rank);
511   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
512   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
513   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
514   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
515   for (proc = 0, n = 0; proc < size; ++proc) {
516     if (PetscBTLookup(neighbors, proc)) {
517       ranksNew[n]              = proc;
518       localPointsNew[n]        = proc;
519       remotePointsNew[n].index = rank;
520       remotePointsNew[n].rank  = proc;
521       ++n;
522     }
523   }
524   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
525   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
526   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
527   if (sfProcess) {
528     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
529     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
530     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
531     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
532   }
533   PetscFunctionReturn(0);
534 }
535 
536 /*@
537   DMPlexDistributeOwnership - Compute owner information for shared points. This turns the one-sided PetscSF description of point sharing into two-sided information.
538 
539   Collective on DM
540 
541   Input Parameter:
542 . dm - The DM
543 
544   Output Parameters:
545 + rootSection - The number of leaves for a given root point
546 . rootrank    - The rank of each edge into the root point
547 . leafSection - The number of processes sharing a given leaf point
548 - leafrank    - The rank of each process sharing a leaf point
549 
550   Level: developer
551 
552 .seealso: DMPlexCreateOverlap()
553 @*/
554 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
555 {
556   MPI_Comm        comm;
557   PetscSF         sfPoint;
558   const PetscInt *rootdegree;
559   PetscInt       *myrank, *remoterank;
560   PetscInt        pStart, pEnd, p, nedges;
561   PetscMPIInt     rank;
562   PetscErrorCode  ierr;
563 
564   PetscFunctionBegin;
565   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
566   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
567   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
568   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
569   /* Compute number of leaves for each root */
570   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
571   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
572   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
573   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
574   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
575   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
576   /* Gather rank of each leaf to root */
577   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
578   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
579   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
580   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
581   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
582   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
583   ierr = PetscFree(myrank);CHKERRQ(ierr);
584   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
585   /* Distribute remote ranks to leaves */
586   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
587   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
588   PetscFunctionReturn(0);
589 }
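/* A sketch of the usual calling sequence: the caller creates the two sections,
   computes ownership, feeds the results to DMPlexCreateOverlap() below, and
   destroys everything afterwards.  The overlap level 1 is arbitrary here.

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     DMLabel      ovLabel;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
     ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
     ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
     ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/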
590 
591 /*@C
592   DMPlexCreateOverlap - Compute a DMLabel marking the points that must be sent to neighboring processes to construct the requested overlap
593 
594   Collective on DM
595 
596   Input Parameters:
597 + dm          - The DM
598 . levels      - Number of overlap levels
599 . rootSection - The number of leaves for a given root point
600 . rootrank    - The rank of each edge into the root point
601 . leafSection - The number of processes sharing a given leaf point
602 - leafrank    - The rank of each process sharing a leaf point
603 
604   Output Parameter:
605 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
606 
607   Level: developer
608 
609 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
610 @*/
611 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
612 {
613   MPI_Comm           comm;
614   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
615   PetscSF            sfPoint, sfProc;
616   const PetscSFNode *remote;
617   const PetscInt    *local;
618   const PetscInt    *nrank, *rrank;
619   PetscInt          *adj = NULL;
620   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
621   PetscMPIInt        rank, size;
622   PetscBool          useCone, useClosure, flg;
623   PetscErrorCode     ierr;
624 
625   PetscFunctionBegin;
626   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
627   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
628   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
629   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
630   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
631   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
632   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
633   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
634   /* Handle leaves: shared with the root point */
635   for (l = 0; l < nleaves; ++l) {
636     PetscInt adjSize = PETSC_DETERMINE, a;
637 
638     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
639     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
640   }
641   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
642   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
643   /* Handle roots */
644   for (p = pStart; p < pEnd; ++p) {
645     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
646 
647     if ((p >= sStart) && (p < sEnd)) {
648       /* Some leaves share a root with other leaves on different processes */
649       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
650       if (neighbors) {
651         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
652         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
653         for (n = 0; n < neighbors; ++n) {
654           const PetscInt remoteRank = nrank[noff+n];
655 
656           if (remoteRank == rank) continue;
657           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
658         }
659       }
660     }
661     /* Roots are shared with leaves */
662     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
663     if (!neighbors) continue;
664     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
665     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
666     for (n = 0; n < neighbors; ++n) {
667       const PetscInt remoteRank = rrank[noff+n];
668 
669       if (remoteRank == rank) continue;
670       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
671     }
672   }
673   ierr = PetscFree(adj);CHKERRQ(ierr);
674   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
675   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
676   /* Add additional overlap levels */
677   for (l = 1; l < levels; l++) {
678     /* Propagate point donations over SF to capture remote connections */
679     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
680     /* Add next level of point donations to the label */
681     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
682   }
683   /* We require the closure in the overlap */
684   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
685   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
686   if (useCone || !useClosure) {
687     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
688   }
689   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
690   if (flg) {
691     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
692   }
693   /* Make global process SF and invert sender to receiver label */
694   {
695     /* Build a global process SF */
696     PetscSFNode *remoteProc;
697     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
698     for (p = 0; p < size; ++p) {
699       remoteProc[p].rank  = p;
700       remoteProc[p].index = rank;
701     }
702     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
703     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
704     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
705   }
706   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
707   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
708   /* Add owned points, except for shared local points */
709   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
710   for (l = 0; l < nleaves; ++l) {
711     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
712     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
713   }
714   /* Clean up */
715   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
716   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
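/* The resulting label is typically turned into a migration SF, e.g. (a sketch,
   with migrationSF as a placeholder output):

     ierr = DMPlexPartitionLabelCreateSF(dm, ovLabel, &migrationSF);CHKERRQ(ierr);
*/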
719 
720 /*@C
721   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
722 
723   Collective on DM
724 
725   Input Parameters:
726 + dm          - The DM
727 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
728 
729   Output Parameter:
730 . migrationSF - An SF that maps original points in old locations to points in new locations
731 
732   Level: developer
733 
734 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
735 @*/
736 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
737 {
738   MPI_Comm           comm;
739   PetscMPIInt        rank, size;
740   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
741   PetscInt          *pointDepths, *remoteDepths, *ilocal;
742   PetscInt          *depthRecv, *depthShift, *depthIdx;
743   PetscSFNode       *iremote;
744   PetscSF            pointSF;
745   const PetscInt    *sharedLocal;
746   const PetscSFNode *overlapRemote, *sharedRemote;
747   PetscErrorCode     ierr;
748 
749   PetscFunctionBegin;
750   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
751   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
752   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
753   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
754   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
755 
756   /* Before building the migration SF we need to know the new stratum offsets */
757   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
758   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
759   for (d=0; d<dim+1; d++) {
760     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
761     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
762   }
763   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
764   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
765   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
766 
767   /* Count received points in each stratum and compute the internal strata shift */
768   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
769   for (d=0; d<dim+1; d++) depthRecv[d]=0;
770   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
771   depthShift[dim] = 0;
772   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
773   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
774   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
775   for (d=0; d<dim+1; d++) {
776     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
777     depthIdx[d] = pStart + depthShift[d];
778   }
779 
780   /* From the overlap SF, build an SF that describes the full overlap migration */
781   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
782   newLeaves = pEnd - pStart + nleaves;
783   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
784   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
785   /* First map local points to themselves */
786   for (d=0; d<dim+1; d++) {
787     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
788     for (p=pStart; p<pEnd; p++) {
789       point = p + depthShift[d];
790       ilocal[point] = point;
791       iremote[point].index = p;
792       iremote[point].rank = rank;
793       depthIdx[d]++;
794     }
795   }
796 
797   /* Add in the remote roots for currently shared points */
798   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
799   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
800   for (d=0; d<dim+1; d++) {
801     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
802     for (p=0; p<numSharedPoints; p++) {
803       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
804         point = sharedLocal[p] + depthShift[d];
805         iremote[point].index = sharedRemote[p].index;
806         iremote[point].rank = sharedRemote[p].rank;
807       }
808     }
809   }
810 
811   /* Now add the incoming overlap points */
812   for (p=0; p<nleaves; p++) {
813     point = depthIdx[remoteDepths[p]];
814     ilocal[point] = point;
815     iremote[point].index = overlapRemote[p].index;
816     iremote[point].rank = overlapRemote[p].rank;
817     depthIdx[remoteDepths[p]]++;
818   }
819   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
820 
821   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
822   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
823   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
824   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
825   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
826 
827   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
831 /*@
832   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
833 
834   Input Parameters:
835 + dm          - The DM
836 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
837 
838   Output Parameter:
839 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
840 
841   Level: developer
842 
843 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
844 @*/
845 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
846 {
847   MPI_Comm           comm;
848   PetscMPIInt        rank, size;
849   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
850   PetscInt          *pointDepths, *remoteDepths, *ilocal;
851   PetscInt          *depthRecv, *depthShift, *depthIdx;
852   PetscInt           hybEnd[4];
853   const PetscSFNode *iremote;
854   PetscErrorCode     ierr;
855 
856   PetscFunctionBegin;
857   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
858   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
859   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
860   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
861   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
862   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
862   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
864 
865   /* Before building the migration SF we need to know the new stratum offsets */
866   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
867   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
868   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
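  /* Encode depth d as 2*d+1 for regular points and 2*d for hybrid points so that hybrid points sort into their own strata */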
869   for (d = 0; d < depth+1; ++d) {
870     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
871     for (p = pStart; p < pEnd; ++p) {
872       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
873         pointDepths[p] = 2 * d;
874       } else {
875         pointDepths[p] = 2 * d + 1;
876       }
877     }
878   }
879   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
880   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
881   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
882   /* Count received points in each stratum and compute the internal strata shift */
883   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
884   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
885   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
886   depthShift[2*depth+1] = 0;
887   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
888   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
889   depthShift[0] += depthRecv[1];
890   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
891   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
892   for (d = 2 * depth-1; d > 2; --d) {
893     PetscInt e;
894 
895     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
896   }
897   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
898   /* Derive a new local permutation based on stratified indices */
899   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
900   for (p = 0; p < nleaves; ++p) {
901     const PetscInt dep = remoteDepths[p];
902 
903     ilocal[p] = depthShift[dep] + depthIdx[dep];
904     depthIdx[dep]++;
905   }
906   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
907   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
908   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
909   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
910   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
913 
914 /*@
915   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
916 
917   Collective on DM
918 
919   Input Parameters:
920 + dm - The DMPlex object
921 . pointSF - The PetscSF describing the communication pattern
922 . originalSection - The PetscSection for existing data layout
923 - originalVec - The existing data
924 
925   Output Parameters:
926 + newSection - The PetscSection describing the new data layout
927 - newVec - The new data
928 
929   Level: developer
930 
931 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
932 @*/
933 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
934 {
935   PetscSF        fieldSF;
936   PetscInt      *remoteOffsets, fieldSize;
937   PetscScalar   *originalValues, *newValues;
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
942   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
943 
944   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
945   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
946   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
947 
948   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
949   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
950   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
951   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
952   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
953   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
954   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
955   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
956   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
957   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
958   PetscFunctionReturn(0);
959 }
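/* A sketch of a call (mirroring DMPlexDistributeCoordinates() below): the caller
   supplies a created-but-empty section and vector, which this routine fills and
   sizes.  origSection and origVec are placeholders for existing data.

     PetscSection newSection;
     Vec          newVec;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
     ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/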
960 
961 /*@
962   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
963 
964   Collective on DM
965 
966   Input Parameters:
967 + dm - The DMPlex object
968 . pointSF - The PetscSF describing the communication pattern
969 . originalSection - The PetscSection for existing data layout
970 - originalIS - The existing data
971 
972   Output Parameters:
973 + newSection - The PetscSection describing the new data layout
974 - newIS - The new data
975 
976   Level: developer
977 
978 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
979 @*/
980 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
981 {
982   PetscSF         fieldSF;
983   PetscInt       *newValues, *remoteOffsets, fieldSize;
984   const PetscInt *originalValues;
985   PetscErrorCode  ierr;
986 
987   PetscFunctionBegin;
988   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
989   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
990 
991   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
992   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
993 
994   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
995   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
996   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
997   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
998   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
999   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1000   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
1001   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
1002   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
1003   PetscFunctionReturn(0);
1004 }
1005 
1006 /*@
1007   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1008 
1009   Collective on DM
1010 
1011   Input Parameters:
1012 + dm - The DMPlex object
1013 . pointSF - The PetscSF describing the communication pattern
1014 . originalSection - The PetscSection for existing data layout
1015 . datatype - The type of data
1016 - originalData - The existing data
1017 
1018   Output Parameters:
1019 + newSection - The PetscSection describing the new data layout
1020 - newData - The new data
1021 
1022   Level: developer
1023 
1024 .seealso: DMPlexDistribute(), DMPlexDistributeField()
1025 @*/
1026 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1027 {
1028   PetscSF        fieldSF;
1029   PetscInt      *remoteOffsets, fieldSize;
1030   PetscMPIInt    dataSize;
1031   PetscErrorCode ierr;
1032 
1033   PetscFunctionBegin;
1034   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1035   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
1036 
1037   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
1038   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
1039   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
1040 
1041   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
1042   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1043   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1044   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1045   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1046   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1047   PetscFunctionReturn(0);
1048 }
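/* A sketch for plain integer data: the routine allocates *newData, which the
   caller eventually frees with PetscFree().  origSection and origData are
   placeholders for existing data.

     PetscSection newSection;
     PetscInt    *newData;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
*/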
1049 
1050 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1051 {
1052   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1053   MPI_Comm               comm;
1054   PetscSF                coneSF;
1055   PetscSection           originalConeSection, newConeSection;
1056   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1057   PetscBool              flg;
1058   PetscErrorCode         ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1062   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1063 
1064   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1065   /* Distribute cone section */
1066   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1067   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1068   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1069   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1070   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1071   {
1072     PetscInt pStart, pEnd, p;
1073 
1074     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1075     for (p = pStart; p < pEnd; ++p) {
1076       PetscInt coneSize;
1077       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1078       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1079     }
1080   }
1081   /* Communicate and renumber cones */
1082   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1083   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1084   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1085   if (original) {
1086     PetscInt numCones;
1087 
1088     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
1089     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
1090   }
1091   else {
1092     globCones = cones;
1093   }
1094   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1095   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1096   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1097   if (original) {
1098     ierr = PetscFree(globCones);CHKERRQ(ierr);
1099   }
1100   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1101   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1102 #if defined(PETSC_USE_DEBUG)
1103   {
1104     PetscInt  p;
1105     PetscBool valid = PETSC_TRUE;
1106     for (p = 0; p < newConesSize; ++p) {
1107       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1108     }
1109     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1110   }
1111 #endif
1112   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1113   if (flg) {
1114     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1115     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1116     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1117     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1118     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1119   }
1120   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1121   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1122   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1123   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1124   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1125   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1126   /* Create supports and stratify DMPlex */
1127   {
1128     PetscInt pStart, pEnd;
1129 
1130     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1131     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1132   }
1133   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1134   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1135   {
1136     PetscBool useCone, useClosure, useAnchors;
1137 
1138     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1139     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1140     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1141     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1142     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1143     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1144   }
1145   PetscFunctionReturn(0);
1146 }
1147 
1148 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1149 {
1150   MPI_Comm         comm;
1151   PetscSection     originalCoordSection, newCoordSection;
1152   Vec              originalCoordinates, newCoordinates;
1153   PetscInt         bs;
1154   PetscBool        isper;
1155   const char      *name;
1156   const PetscReal *maxCell, *L;
1157   const DMBoundaryType *bd;
1158   PetscErrorCode   ierr;
1159 
1160   PetscFunctionBegin;
1161   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1162   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1163 
1164   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1165   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1166   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1167   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1168   if (originalCoordinates) {
1169     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1170     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1171     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1172 
1173     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1174     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1175     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1176     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1177     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1178   }
1179   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1180   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1181   PetscFunctionReturn(0);
1182 }
1183 
1184 /* Here we are assuming that process 0 always has everything */
1185 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1186 {
1187   DM_Plex         *mesh = (DM_Plex*) dm->data;
1188   MPI_Comm         comm;
1189   DMLabel          depthLabel;
1190   PetscMPIInt      rank;
1191   PetscInt         depth, d, numLabels, numLocalLabels, l;
1192   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1193   PetscObjectState depthState = -1;
1194   PetscErrorCode   ierr;
1195 
1196   PetscFunctionBegin;
1197   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1198   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1199 
1200   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1201   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1202   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1203 
1204   /* If the user has changed the depth label, communicate it instead */
1205   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1206   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1207   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1208   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1209   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1210   if (sendDepth) {
1211     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1212     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1213   }
1214   /* Everyone must have either the same number of labels, or none */
1215   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1216   numLabels = numLocalLabels;
1217   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1218   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1219   for (l = numLabels-1; l >= 0; --l) {
1220     DMLabel     label = NULL, labelNew = NULL;
1221     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1222 
1223     if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);}
1224     /* Skip "depth" because it is recreated */
1225     if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);}
1226     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1227     if (isDepth && !sendDepth) continue;
1228     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1229     if (isDepth) {
1230       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1231       PetscInt gdepth;
1232 
1233       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1234       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", depth, gdepth);
1235       for (d = 0; d <= gdepth; ++d) {
1236         PetscBool has;
1237 
1238         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1239         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1240       }
1241     }
1242     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1243     /* Put the output flag in the new label */
1244     if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);}
1245     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1246     ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr);
1247   }
1248   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1249   PetscFunctionReturn(0);
1250 }
1251 
1252 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1253 {
1254   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1255   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1256   PetscBool      *isHybrid, *isHybridParallel;
1257   PetscInt        dim, depth, d;
1258   PetscInt        pStart, pEnd, pStartP, pEndP;
1259   PetscErrorCode  ierr;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1263   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1264 
1265   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1266   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1267   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1268   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1269   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
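  /* Hybrid points occupy the tail [hybridPointMax, sEnd) of each depth stratum: flag them on the source mesh, push the flags through the migration SF, then recover the first hybrid point of each stratum on the parallel mesh */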
1270   for (d = 0; d <= depth; d++) {
1271     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1272 
1273     if (hybridMax >= 0) {
1274       PetscInt sStart, sEnd, p;
1275 
1276       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1277       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1278     }
1279   }
1280   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1281   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1282   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1283   for (d = 0; d <= depth; d++) {
1284     PetscInt sStart, sEnd, p, dd;
1285 
1286     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1287     dd = (depth == 1 && d == 1) ? dim : d;
1288     for (p = sStart; p < sEnd; p++) {
1289       if (isHybridParallel[p-pStartP]) {
1290         pmesh->hybridPointMax[dd] = p;
1291         break;
1292       }
1293     }
1294   }
1295   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1296   PetscFunctionReturn(0);
1297 }
1298 
1299 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1300 {
1301   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1302   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1303   MPI_Comm        comm;
1304   DM              refTree;
1305   PetscSection    origParentSection, newParentSection;
1306   PetscInt        *origParents, *origChildIDs;
1307   PetscBool       flg;
1308   PetscErrorCode  ierr;
1309 
1310   PetscFunctionBegin;
1311   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1312   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1313   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1314 
1315   /* Set up tree */
1316   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1317   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1318   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1319   if (origParentSection) {
1320     PetscInt        pStart, pEnd;
1321     PetscInt        *newParents, *newChildIDs, *globParents;
1322     PetscInt        *remoteOffsetsParents, newParentSize;
1323     PetscSF         parentSF;
1324 
1325     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1326     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1327     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1328     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1329     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1330     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1331     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1332     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1333     if (original) {
1334       PetscInt numParents;
1335 
1336       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1337       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1338       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1339     }
1340     else {
1341       globParents = origParents;
1342     }
1343     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1344     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1345     if (original) {
1346       ierr = PetscFree(globParents);CHKERRQ(ierr);
1347     }
1348     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1349     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1350     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1351 #if defined(PETSC_USE_DEBUG)
1352     {
1353       PetscInt  p;
1354       PetscBool valid = PETSC_TRUE;
1355       for (p = 0; p < newParentSize; ++p) {
1356         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1357       }
1358       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1359     }
1360 #endif
1361     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1362     if (flg) {
1363       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1364       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1365       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1366       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1367       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1368     }
1369     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1370     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1371     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1372     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1373   }
1374   pmesh->useAnchors = mesh->useAnchors;
1375   PetscFunctionReturn(0);
1376 }
1377 
1378 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1379 {
1380   PetscMPIInt            rank, size;
1381   MPI_Comm               comm;
1382   PetscErrorCode         ierr;
1383 
1384   PetscFunctionBegin;
1385   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1386   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1387 
1388   /* Create point SF for parallel mesh */
1389   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1390   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1391   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1392   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1393   {
1394     const PetscInt *leaves;
1395     PetscSFNode    *remotePoints, *rowners, *lowners;
1396     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1397     PetscInt        pStart, pEnd;
1398 
1399     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1400     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1401     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1402     for (p=0; p<numRoots; p++) {
1403       rowners[p].rank  = -1;
1404       rowners[p].index = -1;
1405     }
1406     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1407     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1408     for (p = 0; p < numLeaves; ++p) {
1409       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1410         lowners[p].rank  = rank;
1411         lowners[p].index = leaves ? leaves[p] : p;
1412       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1413         lowners[p].rank  = -2;
1414         lowners[p].index = -2;
1415       }
1416     }
1417     for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1418       rowners[p].rank  = -3;
1419       rowners[p].index = -3;
1420     }
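    /* The MAXLOC reduction keeps the highest bid (rank,index) for every root; broadcasting the winners back tells each leaf its owner */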
1421     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1422     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1423     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1424     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1425     for (p = 0; p < numLeaves; ++p) {
1426       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1427       if (lowners[p].rank != rank) ++numGhostPoints;
1428     }
1429     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1430     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1431     for (p = 0, gp = 0; p < numLeaves; ++p) {
1432       if (lowners[p].rank != rank) {
1433         ghostPoints[gp]        = leaves ? leaves[p] : p;
1434         remotePoints[gp].rank  = lowners[p].rank;
1435         remotePoints[gp].index = lowners[p].index;
1436         ++gp;
1437       }
1438     }
1439     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1440     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1441     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1442   }
1443   {
1444     PetscBool useCone, useClosure, useAnchors;
1445 
1446     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1447     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1448     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1449     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1450     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1451     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1452   }
1453   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1454   PetscFunctionReturn(0);
1455 }
1456 
1457 /*@C
1458   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1459 
1460   Input Parameters:
1461 + dm          - The source DMPlex object
1462 . migrationSF - The star forest that describes the parallel point remapping
1463 - ownership   - Flag causing a vote to determine point ownership
1464 
1465   Output Parameter:
1466 . pointSF     - The star forest describing the point overlap in the remapped DM
1467 
1468   Level: developer
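
  Example:
  A minimal sketch (error handling abbreviated) mirroring the usage in DMPlexDistribute() below; dmNew is a hypothetical DM produced by migrating over migrationSF:
.vb
  PetscSF pointSF;

  ierr = DMPlexCreatePointSF(dmNew, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmNew, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
.ve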
1469 
1470 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1471 @*/
1472 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1473 {
1474   DM_Plex           *mesh = (DM_Plex *) dm->data;
1475   PetscMPIInt        rank, size;
1476   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1477   PetscInt          *pointLocal;
1478   const PetscInt    *leaves;
1479   const PetscSFNode *roots;
1480   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1481   Vec                shifts;
1482   const PetscInt     numShifts = 37; /* TODO Use larger prime */
1483   const PetscScalar *shift = NULL;
1484   const PetscBool    shiftDebug = PETSC_FALSE;
1485   PetscErrorCode     ierr;
1486 
1487   PetscFunctionBegin;
1488   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1489   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1490   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1491 
1492   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1493   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1494   if (ownership) {
1495     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
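    /* The identical shift table enters every rank comparison below, so the vote and the subsequent ownership tests remain consistent */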
1496     if (mesh->partitionBalance) {
1497       PetscRandom r;
1498 
1499       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1500       ierr = PetscRandomSetInterval(r, 0, 17*size);CHKERRQ(ierr);
1501       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1502       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1503       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1504       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1505       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1506       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1507     }
1508 
1509     /* Point ownership vote: Process with highest rank owns shared points */
1510     for (p = 0; p < nleaves; ++p) {
1511       if (shiftDebug) {
1512         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0, (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1513       }
1514       /* Either put in a bid or we know we own it */
1515       leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1516       leafNodes[p].index = p;
1517     }
1518     for (p = 0; p < nroots; p++) {
1519       /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1520       rootNodes[p].rank  = -3;
1521       rootNodes[p].index = -3;
1522     }
1523     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1524     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1525   } else {
1526     for (p = 0; p < nroots; p++) {
1527       rootNodes[p].index = -1;
1528       rootNodes[p].rank = rank;
1529     }
1530     for (p = 0; p < nleaves; p++) {
1531       /* Write new local id into old location */
1532       if (roots[p].rank == rank) {
1533         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1534       }
1535     }
1536   }
1537   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1538   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1539 
1540   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1541     if (shiftDebug) {
1542       ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Root %D, Rank %D MyRank %D\n", rank, roots[p].index, leafNodes[p].rank, (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1543     }
1544     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size) npointLeaves++;
1545   }
1546   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1547   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1548   for (idx = 0, p = 0; p < nleaves; p++) {
1549     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size) {
1550       pointLocal[idx] = p;
1551       pointRemote[idx] = leafNodes[p];
1552       idx++;
1553     }
1554   }
1555   if (shift) {
1556     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1557     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1558   }
1559   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1560   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1561   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1562   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1563   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1564   PetscFunctionReturn(0);
1565 }
1566 
1567 /*@C
1568   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1569 
1570   Input Parameters:
1571 + dm       - The source DMPlex object
1572 - sf       - The star forest communication context describing the migration pattern
1573 
1574   Output Parameter:
1575 . targetDM - The target DMPlex object
1576 
1577   Level: intermediate
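
  Example:
  A minimal sketch, assuming sf already describes the desired point migration (e.g. built by DMPlexPartitionLabelCreateSF()); the target DM must exist beforehand:
.vb
  DM dmTarget;

  ierr = DMPlexCreate(comm, &dmTarget);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sf, dmTarget);CHKERRQ(ierr);
.ve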
1578 
1579 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1580 @*/
1581 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1582 {
1583   MPI_Comm               comm;
1584   PetscInt               dim, cdim, nroots;
1585   PetscSF                sfPoint;
1586   ISLocalToGlobalMapping ltogMigration;
1587   ISLocalToGlobalMapping ltogOriginal = NULL;
1588   PetscBool              flg;
1589   PetscErrorCode         ierr;
1590 
1591   PetscFunctionBegin;
1592   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1593   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1594   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1595   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1596   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1597   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1598   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1599 
1600   /* Check for a one-to-all distribution pattern */
1601   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1602   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
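  /* nroots >= 0 means the point SF has a graph, i.e. dm is already distributed, so a consistent global point numbering must be carried along */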
1603   if (nroots >= 0) {
1604     IS                     isOriginal;
1605     PetscInt               n, size, nleaves;
1606     PetscInt              *numbering_orig, *numbering_new;
1607     /* Get the original point numbering */
1608     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1609     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1610     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1611     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1612     /* Convert to positive global numbers */
1613     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1614     /* Derive the new local-to-global mapping from the old one */
1615     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1616     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1617     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1618     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1619     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1620     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1621     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1622   } else {
1623     /* One-to-all distribution pattern: We can derive LToG from SF */
1624     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1625   }
1626   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1627   if (flg) {
1628     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1629     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1630   }
1631   /* Migrate DM data to target DM */
1632   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1633   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1634   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1635   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1636   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1637   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1638   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1639   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1640   PetscFunctionReturn(0);
1641 }
1642 
1643 /*@C
1644   DMPlexDistribute - Distributes the mesh and any associated sections.
1645 
1646   Collective on dm
1647 
1648   Input Parameters:
1649 + dm  - The original DMPlex object
1650 - overlap - The overlap of partitions, 0 is the default
1651 
1652   Output Parameters:
1653 + sf - The PetscSF used for point distribution (pass NULL if not needed)
1654 - dmParallel - The distributed DMPlex object, or NULL
1655 
1656   Note: If the mesh was not distributed, dmParallel is set to NULL.
1657 
1658   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1659   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1660   representation on the mesh.
1661 
1662   Level: intermediate
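
  Example:
  A typical calling sequence, sketched with error handling abbreviated; the migration SF output is optional:
.vb
  DM      dmDist;
  PetscSF sf;

  ierr = DMPlexDistribute(dm, 0, &sf, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
.ve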
1663 
1664 .keywords: mesh, elements
1665 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1666 @*/
1667 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1668 {
1669   MPI_Comm               comm;
1670   PetscPartitioner       partitioner;
1671   IS                     cellPart;
1672   PetscSection           cellPartSection;
1673   DM                     dmCoord;
1674   DMLabel                lblPartition, lblMigration;
1675   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1676   PetscBool              flg;
1677   PetscMPIInt            rank, size, p;
1678   PetscErrorCode         ierr;
1679 
1680   PetscFunctionBegin;
1681   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1682   if (sf) PetscValidPointer(sf,3);
1683   PetscValidPointer(dmParallel,4);
1684 
1685   if (sf) *sf = NULL;
1686   *dmParallel = NULL;
1687   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1688   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1689   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1690   if (size == 1) PetscFunctionReturn(0);
1691 
1692   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1693   /* Create cell partition */
1694   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1695   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1696   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1697   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1698   {
1699     /* Convert partition to DMLabel */
1700     PetscInt proc, pStart, pEnd, npoints, poffset;
1701     const PetscInt *points;
1702     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1703     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1704     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1705     for (proc = pStart; proc < pEnd; proc++) {
1706       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1707       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1708       for (p = poffset; p < poffset+npoints; p++) {
1709         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1710       }
1711     }
1712     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1713   }
1714   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1715   {
1716     /* Build a global process SF */
1717     PetscSFNode *remoteProc;
1718     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1719     for (p = 0; p < size; ++p) {
1720       remoteProc[p].rank  = p;
1721       remoteProc[p].index = rank;
1722     }
1723     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1724     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1725     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1726   }
1727   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1728   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1729   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1730   /* Stratify the SF in case we are migrating an already parallel plex */
1731   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1732   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1733   sfMigration = sfStratified;
1734   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1735   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1736   if (flg) {
1737     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1738     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1739   }
1740 
1741   /* Create non-overlapping parallel DM and migrate internal data */
1742   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1743   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1744   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1745 
1746   /* Build the point SF without overlap */
1747   ((DM_Plex*) (*dmParallel)->data)->partitionBalance = ((DM_Plex*) dm->data)->partitionBalance;
1748   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1749   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1750   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1751   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1752   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1753 
1754   if (overlap > 0) {
1755     DM                 dmOverlap;
1756     PetscInt           nroots, nleaves;
1757     PetscSFNode       *newRemote;
1758     const PetscSFNode *oldRemote;
1759     PetscSF            sfOverlap, sfOverlapPoint;
1760     /* Add the partition overlap to the distributed DM */
1761     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1762     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1763     *dmParallel = dmOverlap;
1764     if (flg) {
1765       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1766       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1767     }
1768 
1769     /* Re-map the migration SF to establish the full migration pattern */
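    /* Broadcasting the old remote points over the overlap SF composes the two SFs, so the combined SF maps original points directly onto the overlapped distribution */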
1770     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1771     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1772     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1773     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1774     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1775     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1776     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1777     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1778     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1779     sfMigration = sfOverlapPoint;
1780   }
1781   /* Cleanup Partition */
1782   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1783   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1784   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1785   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1786   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1787   /* Copy BC */
1788   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1789   /* Create sfNatural */
1790   if (dm->useNatural) {
1791     PetscSection section;
1792 
1793     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1794     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1795     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1796   }
1797   /* Cleanup */
1798   if (sf) {*sf = sfMigration;}
1799   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1800   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1801   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1802   PetscFunctionReturn(0);
1803 }
1804 
1805 /*@C
1806   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1807 
1808   Not Collective
1809 
1810   Input Parameter:
1811 + dm  - The non-overlapping distrbuted DMPlex object
1812 - overlap - The overlap of partitions
1813 
1814   Output Parameter:
1815 + sf - The PetscSF used for point distribution
1816 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1817 
1818   Note: If the mesh was not distributed, the return value is NULL.
1819 
1820   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1821   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1822   representation on the mesh.
1823 
1824   Level: intermediate
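
  Example:
  A sketch of adding one layer of overlap to an already distributed mesh (error handling abbreviated):
.vb
  DM dmOverlap;

  ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmOverlap;
  }
.ve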
1825 
1826 .keywords: mesh, elements
1827 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1828 @*/
1829 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1830 {
1831   MPI_Comm               comm;
1832   PetscMPIInt            size, rank;
1833   PetscSection           rootSection, leafSection;
1834   IS                     rootrank, leafrank;
1835   DM                     dmCoord;
1836   DMLabel                lblOverlap;
1837   PetscSF                sfOverlap, sfStratified, sfPoint;
1838   PetscErrorCode         ierr;
1839 
1840   PetscFunctionBegin;
1841   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1842   if (sf) PetscValidPointer(sf, 3);
1843   PetscValidPointer(dmOverlap, 4);
1844 
1845   if (sf) *sf = NULL;
1846   *dmOverlap  = NULL;
1847   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1848   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1849   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1850   if (size == 1) PetscFunctionReturn(0);
1851 
1852   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1853   /* Compute point overlap with neighbouring processes on the distributed DM */
1854   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1855   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1856   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1857   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1858   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1859   /* Convert overlap label to stratified migration SF */
1860   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1861   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1862   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1863   sfOverlap = sfStratified;
1864   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1865   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1866 
1867   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1868   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1869   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1870   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1871   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1872 
1873   /* Build the overlapping DM */
1874   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1875   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1876   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1877   /* Build the new point SF */
1878   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1879   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1880   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1881   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1882   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1883   /* Cleanup overlap partition */
1884   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1885   if (sf) *sf = sfOverlap;
1886   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1887   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1888   PetscFunctionReturn(0);
1889 }
1890 
1891 /*@C
1892   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1893   root process of the original's communicator.
1894 
1895   Input Parameter:
1896 . dm - the original DMPlex object
1897 
1898   Output Parameter:
1899 . gatherMesh - the gathered DM object, or NULL
1900 
1901   Level: intermediate
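
  Example:
  A sketch for serial postprocessing on rank 0; gatherMesh is NULL when the mesh is already serial:
.vb
  DM gatherMesh;

  ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
  if (gatherMesh) {
    /* rank 0 of gatherMesh now holds every point of dm */
    ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
  }
.ve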
1902 
1903 .keywords: mesh
1904 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1905 @*/
1906 PetscErrorCode DMPlexGetGatherDM(DM dm, DM *gatherMesh)
1907 {
1908   MPI_Comm       comm;
1909   PetscMPIInt    size;
1910   PetscPartitioner oldPart, gatherPart;
1911   PetscErrorCode ierr;
1912 
1913   PetscFunctionBegin;
1914   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1915   PetscValidPointer(gatherMesh,2);
1916   *gatherMesh = NULL;
1917   comm = PetscObjectComm((PetscObject)dm);
1918   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1919   if (size == 1) PetscFunctionReturn(0);
1920   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1921   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1922   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1923   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1924   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1925   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1926   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1927   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1928   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1929   PetscFunctionReturn(0);
1930 }
1931 
1932 /*@C
1933   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1934 
1935   Input Parameter:
1936 . dm - the original DMPlex object
1937 
1938   Output Parameter:
1939 . redundantMesh - the redundant DM object, or NULL
1940 
1941   Level: intermediate
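
  Example:
  A sketch; on a single process the original dm is returned with its reference count bumped, so the destroy below is still correct:
.vb
  DM redundantMesh;

  ierr = DMPlexGetRedundantDM(dm, &redundantMesh);CHKERRQ(ierr);
  if (redundantMesh) {
    /* every process now holds a full copy of the mesh */
    ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);
  }
.ve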
1942 
1943 .keywords: mesh
1944 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1945 @*/
1946 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM *redundantMesh)
1947 {
1948   MPI_Comm       comm;
1949   PetscMPIInt    size, rank;
1950   PetscInt       pStart, pEnd, p;
1951   PetscInt       numPoints = -1;
1952   PetscSF        migrationSF, sfPoint;
1953   DM             gatherDM, dmCoord;
1954   PetscSFNode    *points;
1955   PetscErrorCode ierr;
1956 
1957   PetscFunctionBegin;
1958   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1959   PetscValidPointer(redundantMesh,2);
1960   *redundantMesh = NULL;
1961   comm = PetscObjectComm((PetscObject)dm);
1962   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1963   if (size == 1) {
1964     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1965     *redundantMesh = dm;
1966     PetscFunctionReturn(0);
1967   }
1968   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1969   if (!gatherDM) PetscFunctionReturn(0);
1970   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1971   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1972   numPoints = pEnd - pStart;
1973   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1974   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1975   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1976   for (p = 0; p < numPoints; p++) {
1977     points[p].index = p;
1978     points[p].rank  = 0;
1979   }
1980   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1981   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1982   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1983   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1984   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1985   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1986   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1987   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1988   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1989   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1990   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1991   PetscFunctionReturn(0);
1992 }
1993