xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 6e5f5dad97d5d0d453d1db100f9dbfcf5747de2b)
#include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
#include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/

/*@C
  DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback

  Input Parameters:
+ dm      - The DM object
. user    - The user callback, may be NULL (to clear the callback)
- ctx     - context for callback evaluation, may be NULL

  Level: advanced

  Notes:
     The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.

     Any setting here overrides other configuration of DMPlex adjacency determination.

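  Example:
  A minimal sketch of a user callback and its registration; the callback name
  MyAdjacency and its body are illustrative, not part of PETSc:
$      static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
$      {
$        /* On entry *adjSize is the capacity of adj[]; fill adj[] and set *adjSize to the count */
$        *adjSize = 0;
$        return 0;
$      }
$
$      ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr);
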
.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
@*/
PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useradjacency = user;
  mesh->useradjacencyctx = ctx;
  PetscFunctionReturn(0);
}

/*@C
  DMPlexGetAdjacencyUser - Get the user-defined adjacency callback

  Input Parameter:
. dm      - The DM object

  Output Parameters:
+ user    - The user callback
- ctx     - context for callback evaluation

  Level: advanced

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

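  Example:
  A sketch of selecting the FVM row of the table above; both flags participate,
  so pair this with DMPlexSetAdjacencyUseClosure():
$      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
$      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
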
.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  PetscDS        prob;
  PetscBool      useClosure;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  PetscDS        prob;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm      - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

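  Example:
  A sketch of selecting the FEM row of the table above:
$      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
$      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
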
.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  PetscDS        prob;
  PetscBool      useCone;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  PetscDS        prob;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm      - The DM object
- useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

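  Example:
  A sketch of enabling anchor substitution once constraints have been registered
  with DMPlexSetAnchors() (anchorSec and anchorIS are assumed to exist):
$      ierr = DMPlexSetAnchors(dm, anchorSec, anchorIS);CHKERRQ(ierr);
$      ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
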
.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  /* Iterate over p itself (c == 0) and each point of its cone */
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt  point   = !c ? p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
      /* Linear-scan deduplication: if support[s] is not found in adj[0..numAdj), the
         comma expression in the loop condition appends it once q reaches numAdj */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  /* Iterate over p itself (s == 0) and each point of its support */
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt  point = !s ? p : support[s-1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      /* Same dedup-and-append idiom as in DMPlexGetAdjacency_Cone_Internal() */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  /* The transitive closure array interleaves points and orientations, hence the stride of 2 */
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    /* Bound the adjacency size by the geometric series 1 + max + ... + max^depth for
       the cone and support traversals, then cap by the total number of points */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    /* Replace each constrained (anchored) point in the adjacency by its anchor points */
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the remaining entries left */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append its anchors, deduplicating with the same idiom used above */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The DM object
. p  - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj - The adjacent points

  Level: advanced

  Notes:
  The user must PetscFree the adj array if it was not passed in.

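  Example:
  A sketch that visits every point and lets the first call size the work array:
$      PetscInt *adj = NULL;
$      PetscInt  adjSize, pStart, pEnd, p;
$
$      ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
$      for (p = pStart; p < pEnd; ++p) {
$        adjSize = PETSC_DETERMINE;
$        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$        /* use adj[0], ..., adj[adjSize-1] */
$      }
$      ierr = PetscFree(adj);CHKERRQ(ierr);
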
.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  PetscBool      useCone, useClosure, useAnchors;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges */
  PetscBTClear(neighbors, rank);
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This creates a two-sided (root and leaf) description of the sharing encoded in the point SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

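  Example:
  A sketch of the typical call sequence; the caller creates the sections, and the
  returned ISs are owned by the caller:
$      PetscSection rootSection, leafSection;
$      IS           rootrank, leafrank;
$
$      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
$      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
$      ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
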
552 @*/
553 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
554 {
555   MPI_Comm        comm;
556   PetscSF         sfPoint;
557   const PetscInt *rootdegree;
558   PetscInt       *myrank, *remoterank;
559   PetscInt        pStart, pEnd, p, nedges;
560   PetscMPIInt     rank;
561   PetscErrorCode  ierr;
562 
563   PetscFunctionBegin;
564   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
565   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
566   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
567   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
568   /* Compute number of leaves for each root */
569   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
570   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
571   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
572   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
573   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
574   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
575   /* Gather rank of each leaf to root */
576   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
577   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
578   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
579   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
580   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
581   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
582   ierr = PetscFree(myrank);CHKERRQ(ierr);
583   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
584   /* Distribute remote ranks to leaves */
585   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
586   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 /*@C
591   DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.
592 
593   Collective on DM
594 
595   Input Parameters:
596 + dm          - The DM
597 . levels      - Number of overlap levels
598 . rootSection - The number of leaves for a given root point
599 . rootrank    - The rank of each edge into the root point
600 . leafSection - The number of processes sharing a given leaf point
601 - leafrank    - The rank of each process sharing a leaf point
602 
603   Output Parameters:
604 + ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
605 
606   Level: developer
607 
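  Example:
  A sketch of building a one-level overlap label from the ownership information
  returned by DMPlexDistributeOwnership():
$      DMLabel ovLabel;
$
$      ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
$      ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
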
.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution needed to create the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

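  Example:
  A sketch of deriving a migration SF from an overlap label; here ovLabel is
  assumed to come from DMPlexCreateOverlap() and overlapSF from
  DMPlexPartitionLabelCreateSF():
$      PetscSF overlapSF, migrationSF;
$
$      ierr = DMPlexPartitionLabelCreateSF(dm, ovLabel, &overlapSF);CHKERRQ(ierr);
$      ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
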
.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.

  Input Parameters:
+ dm          - The DM
- sf          - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

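  Example:
  A sketch of redistributing a field; the caller creates the output section and
  vector, mirroring the usage in DMPlexDistributeCoordinates() below (migrationSF,
  origSection, and origVec are assumed to exist):
$      PetscSection newSection;
$      Vec          newVec;
$
$      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
$      ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
$      ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
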
.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

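  Example:
  A sketch of migrating raw integer per-point data; newData is allocated by the
  call and must be freed by the caller (migrationSF, origSection, and origData
  are assumed to exist):
$      PetscSection newSection;
$      PetscInt    *newData;
$
$      ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
$      ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
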
.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Here we are assuming that process 0 always has everything */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;

    if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);}
    /* Skip "depth" because it is recreated */
    if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);}
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1250 
1251 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1252 {
1253   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1254   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1255   PetscBool      *isHybrid, *isHybridParallel;
1256   PetscInt        dim, depth, d;
1257   PetscInt        pStart, pEnd, pStartP, pEndP;
1258   PetscErrorCode  ierr;
1259 
1260   PetscFunctionBegin;
1261   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1262   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1263 
1264   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1265   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1266   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1267   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1268   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
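  /* Mark each local point at or beyond the hybrid bound of its stratum, then propagate the flags to the distributed mesh */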
1269   for (d = 0; d <= depth; d++) {
1270     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1271 
1272     if (hybridMax >= 0) {
1273       PetscInt sStart, sEnd, p;
1274 
1275       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1276       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1277     }
1278   }
1279   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1280   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1281   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1282   for (d = 0; d <= depth; d++) {
1283     PetscInt sStart, sEnd, p, dd;
1284 
1285     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1286     dd = (depth == 1 && d == 1) ? dim : d;
1287     for (p = sStart; p < sEnd; p++) {
1288       if (isHybridParallel[p-pStartP]) {
1289         pmesh->hybridPointMax[dd] = p;
1290         break;
1291       }
1292     }
1293   }
1294   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1295   PetscFunctionReturn(0);
1296 }
1297 
1298 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1299 {
1300   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1301   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1302   MPI_Comm        comm;
1303   DM              refTree;
1304   PetscSection    origParentSection, newParentSection;
1305   PetscInt        *origParents, *origChildIDs;
1306   PetscBool       flg;
1307   PetscErrorCode  ierr;
1308 
1309   PetscFunctionBegin;
1310   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1311   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1312   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1313 
1314   /* Set up tree */
1315   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1316   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1317   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1318   if (origParentSection) {
1319     PetscInt        pStart, pEnd;
1320     PetscInt        *newParents, *newChildIDs, *globParents;
1321     PetscInt        *remoteOffsetsParents, newParentSize;
1322     PetscSF         parentSF;
1323 
1324     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1325     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1326     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1327     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1328     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1329     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1330     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1331     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1332     if (original) {
1333       PetscInt numParents;
1334 
1335       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1336       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1337       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1338     }
1339     else {
1340       globParents = origParents;
1341     }
1342     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1343     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1344     if (original) {
1345       ierr = PetscFree(globParents);CHKERRQ(ierr);
1346     }
1347     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1348     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1349     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1350 #if defined(PETSC_USE_DEBUG)
1351     {
1352       PetscInt  p;
1353       PetscBool valid = PETSC_TRUE;
1354       for (p = 0; p < newParentSize; ++p) {
1355         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1356       }
1357       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1358     }
1359 #endif
1360     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1361     if (flg) {
1362       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1363       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1364       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1365       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1366       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1367     }
1368     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1369     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1370     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1371     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1372   }
1373   pmesh->useAnchors = mesh->useAnchors;
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1378 {
1379   PetscMPIInt            rank, size;
1380   MPI_Comm               comm;
1381   PetscErrorCode         ierr;
1382 
1383   PetscFunctionBegin;
1384   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1385   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1386 
1387   /* Create point SF for parallel mesh */
1388   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1389   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1390   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1391   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1392   {
1393     const PetscInt *leaves;
1394     PetscSFNode    *remotePoints, *rowners, *lowners;
1395     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1396     PetscInt        pStart, pEnd;
1397 
1398     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1399     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1400     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1401     for (p=0; p<numRoots; p++) {
1402       rowners[p].rank  = -1;
1403       rowners[p].index = -1;
1404     }
1405     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1406     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1407     for (p = 0; p < numLeaves; ++p) {
1408       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1409         lowners[p].rank  = rank;
1410         lowners[p].index = leaves ? leaves[p] : p;
1411       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1412         lowners[p].rank  = -2;
1413         lowners[p].index = -2;
1414       }
1415     }
1416     for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, so flag it so that MAXLOC ignores it */
1417       rowners[p].rank  = -3;
1418       rowners[p].index = -3;
1419     }
1420     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1421     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1422     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1423     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1424     for (p = 0; p < numLeaves; ++p) {
1425       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1426       if (lowners[p].rank != rank) ++numGhostPoints;
1427     }
1428     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1429     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1430     for (p = 0, gp = 0; p < numLeaves; ++p) {
1431       if (lowners[p].rank != rank) {
1432         ghostPoints[gp]        = leaves ? leaves[p] : p;
1433         remotePoints[gp].rank  = lowners[p].rank;
1434         remotePoints[gp].index = lowners[p].index;
1435         ++gp;
1436       }
1437     }
1438     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1439     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1440     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1441   }
1442   {
1443     PetscBool useCone, useClosure, useAnchors;
1444 
1445     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1446     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1447     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1448     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1449     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1450     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1451   }
1452   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1453   PetscFunctionReturn(0);
1454 }
1455 
1456 /*@C
1457   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1458 
1459   Input Parameters:
1460 + dm          - The source DMPlex object
1461 . migrationSF - The star forest that describes the parallel point remapping
1462 - ownership   - Flag causing a vote to determine point ownership
1463 
1464   Output Parameter:
1465 . pointSF     - The star forest describing the point overlap in the remapped DM
1466 
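  Notes:
  A minimal usage sketch (variable names are illustrative; this mirrors how DMPlexDistribute() consumes the result):
$    PetscSF pointSF;
$
$    ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
$    ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
$    ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
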
1467   Level: developer
1468 
1469 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1470 @*/
1471 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1472 {
1473   PetscMPIInt        rank;
1474   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1475   PetscInt          *pointLocal;
1476   const PetscInt    *leaves;
1477   const PetscSFNode *roots;
1478   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1479   PetscErrorCode     ierr;
1480 
1481   PetscFunctionBegin;
1482   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1483   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1484 
1485   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1486   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1487   if (ownership) {
1488     /* Point ownership vote: Process with highest rank owns shared points */
1489     for (p = 0; p < nleaves; ++p) {
1490       /* Either put in a bid or we know we own it */
1491       leafNodes[p].rank  = rank;
1492       leafNodes[p].index = p;
1493     }
1494     for (p = 0; p < nroots; p++) {
1495       /* Root must not participate in the reduction, so flag it so that MAXLOC ignores it */
1496       rootNodes[p].rank  = -3;
1497       rootNodes[p].index = -3;
1498     }
1499     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
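    /* MPI_MAXLOC compares the rank field of the MPIU_2INT pairs first, so the highest-rank bidder wins each shared root */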
1500     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1501   } else {
1502     for (p = 0; p < nroots; p++) {
1503       rootNodes[p].index = -1;
1504       rootNodes[p].rank = rank;
1505     }
1506     for (p = 0; p < nleaves; p++) {
1507       /* Write new local id into old location */
1508       if (roots[p].rank == rank) {
1509         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1510       }
1511     }
1512   }
1513   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1514   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1515 
1516   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1517   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1518   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1519   for (idx = 0, p = 0; p < nleaves; p++) {
1520     if (leafNodes[p].rank != rank) {
1521       pointLocal[idx] = p;
1522       pointRemote[idx] = leafNodes[p];
1523       idx++;
1524     }
1525   }
1526   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1527   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1528   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1529   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1530   PetscFunctionReturn(0);
1531 }
1532 
1533 /*@C
1534   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1535 
1536   Input Parameters:
1537 + dm       - The source DMPlex object
1538 - sf       - The star forest communication context describing the migration pattern
1539 
1540   Output Parameter:
1541 . targetDM - The target DMPlex object
1542 
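  Notes:
  A minimal sketch of the calling pattern used internally by DMPlexDistribute(); the target DM must be created beforehand:
$    DM target;
$
$    ierr = DMPlexCreate(comm, &target);CHKERRQ(ierr);
$    ierr = DMPlexMigrate(dm, sf, target);CHKERRQ(ierr);
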
1543   Level: intermediate
1544 
1545 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1546 @*/
1547 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1548 {
1549   MPI_Comm               comm;
1550   PetscInt               dim, nroots;
1551   PetscSF                sfPoint;
1552   ISLocalToGlobalMapping ltogMigration;
1553   ISLocalToGlobalMapping ltogOriginal = NULL;
1554   PetscBool              flg;
1555   PetscErrorCode         ierr;
1556 
1557   PetscFunctionBegin;
1558   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1559   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1560   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1561   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1562   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1563 
1564   /* Check for a one-to-all distribution pattern */
1565   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1566   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1567   if (nroots >= 0) {
1568     IS                     isOriginal;
1569     PetscInt               n, size, nleaves;
1570     PetscInt              *numbering_orig, *numbering_new;
1571     /* Get the original point numbering */
1572     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1573     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1574     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1575     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1576     /* Convert to positive global numbers */
1577     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1578     /* Derive the new local-to-global mapping from the old one */
1579     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1580     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1581     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1582     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1583     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1584     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1585     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1586   } else {
1587     /* One-to-all distribution pattern: We can derive LToG from SF */
1588     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1589   }
1590   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1591   if (flg) {
1592     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1593     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1594   }
1595   /* Migrate DM data to target DM */
1596   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1597   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1598   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1599   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1600   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1601   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1602   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1603   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1604   PetscFunctionReturn(0);
1605 }
1606 
1607 /*@C
1608   DMPlexDistribute - Distributes the mesh and any associated sections.
1609 
1610   Collective on dm
1611 
1612   Input Parameters:
1613 + dm  - The original DMPlex object
1614 - overlap - The overlap of partitions, 0 is the default
1615 
1616   Output Parameters:
1617 + sf - The PetscSF used for point distribution
1618 - dmParallel - The distributed DMPlex object, or NULL
1619 
1620   Note: If the mesh was not distributed (for example, on a single process), *dmParallel is set to NULL.
1621 
1622   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1623   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1624   representation on the mesh.
1625 
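  A typical usage sketch (a common pattern in PETSc examples; dm is replaced by its distributed counterpart):
$    DM dmDist = NULL;
$
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }
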
1626   Level: intermediate
1627 
1628 .keywords: mesh, elements
1629 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1630 @*/
1631 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1632 {
1633   MPI_Comm               comm;
1634   PetscPartitioner       partitioner;
1635   IS                     cellPart;
1636   PetscSection           cellPartSection;
1637   DM                     dmCoord;
1638   DMLabel                lblPartition, lblMigration;
1639   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1640   PetscBool              flg;
1641   PetscMPIInt            rank, size, p;
1642   PetscErrorCode         ierr;
1643 
1644   PetscFunctionBegin;
1645   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1646   if (sf) PetscValidPointer(sf,3);
1647   PetscValidPointer(dmParallel,4);
1648 
1649   if (sf) *sf = NULL;
1650   *dmParallel = NULL;
1651   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1652   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1653   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1654   if (size == 1) PetscFunctionReturn(0);
1655 
1656   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1657   /* Create cell partition */
1658   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1659   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1660   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1661   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1662   {
1663     /* Convert partition to DMLabel */
1664     PetscInt proc, pStart, pEnd, npoints, poffset;
1665     const PetscInt *points;
1666     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1667     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1668     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1669     for (proc = pStart; proc < pEnd; proc++) {
1670       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1671       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1672       for (p = poffset; p < poffset+npoints; p++) {
1673         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1674       }
1675     }
1676     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1677   }
1678   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1679   {
1680     /* Build a global process SF */
1681     PetscSFNode *remoteProc;
1682     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1683     for (p = 0; p < size; ++p) {
1684       remoteProc[p].rank  = p;
1685       remoteProc[p].index = rank;
1686     }
1687     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1688     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1689     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1690   }
1691   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1692   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1693   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1694   /* Stratify the SF in case we are migrating an already parallel plex */
1695   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1696   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1697   sfMigration = sfStratified;
1698   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1699   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1700   if (flg) {
1701     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1702     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1703   }
1704 
1705   /* Create non-overlapping parallel DM and migrate internal data */
1706   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1707   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1708   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1709 
1710   /* Build the point SF without overlap */
1711   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1712   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1713   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1714   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1715   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1716 
1717   if (overlap > 0) {
1718     DM                 dmOverlap;
1719     PetscInt           nroots, nleaves;
1720     PetscSFNode       *newRemote;
1721     const PetscSFNode *oldRemote;
1722     PetscSF            sfOverlap, sfOverlapPoint;
1723     /* Add the partition overlap to the distributed DM */
1724     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1725     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1726     *dmParallel = dmOverlap;
1727     if (flg) {
1728       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1729       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1730     }
1731 
1732     /* Re-map the migration SF to establish the full migration pattern */
1733     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1734     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1735     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1736     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1737     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1738     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1739     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1740     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1741     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1742     sfMigration = sfOverlapPoint;
1743   }
1744   /* Cleanup Partition */
1745   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1746   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1747   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1748   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1749   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1750   /* Copy BC */
1751   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1752   /* Create sfNatural */
1753   if (dm->useNatural) {
1754     PetscSection section;
1755 
1756     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1757     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1758     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1759   }
1760   /* Cleanup */
1761   if (sf) {*sf = sfMigration;}
1762   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1763   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1764   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1765   PetscFunctionReturn(0);
1766 }
1767 
1768 /*@C
1769   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1770 
1771   Collective on dm
1772 
1773   Input Parameters:
1774 + dm  - The non-overlapping distributed DMPlex object
1775 - overlap - The overlap of partitions
1776 
1777   Output Parameters:
1778 + sf - The PetscSF used for point distribution
1779 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1780 
1781   Note: If the mesh was not distributed (for example, on a single process), *dmOverlap is set to NULL.
1782 
1783   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1784   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1785   representation on the mesh.
1786 
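  A minimal usage sketch (assumes dm is a non-overlapping distributed mesh, e.g. from DMPlexDistribute()):
$    DM dmOver = NULL;
$
$    ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);CHKERRQ(ierr);
$    if (dmOver) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmOver;
$    }
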
1787   Level: intermediate
1788 
1789 .keywords: mesh, elements
1790 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1791 @*/
1792 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1793 {
1794   MPI_Comm               comm;
1795   PetscMPIInt            size, rank;
1796   PetscSection           rootSection, leafSection;
1797   IS                     rootrank, leafrank;
1798   DM                     dmCoord;
1799   DMLabel                lblOverlap;
1800   PetscSF                sfOverlap, sfStratified, sfPoint;
1801   PetscErrorCode         ierr;
1802 
1803   PetscFunctionBegin;
1804   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1805   if (sf) PetscValidPointer(sf, 3);
1806   PetscValidPointer(dmOverlap, 4);
1807 
1808   if (sf) *sf = NULL;
1809   *dmOverlap  = NULL;
1810   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1811   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1812   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1813   if (size == 1) PetscFunctionReturn(0);
1814 
1815   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1816   /* Compute point overlap with neighbouring processes on the distributed DM */
1817   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1818   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1819   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1820   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1821   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1822   /* Convert overlap label to stratified migration SF */
1823   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1824   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1825   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1826   sfOverlap = sfStratified;
1827   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1828   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1829 
1830   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1831   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1832   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1833   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1834   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1835 
1836   /* Build the overlapping DM */
1837   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1838   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1839   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1840   /* Build the new point SF */
1841   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1842   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1843   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1844   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1845   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1846   /* Cleanup overlap partition */
1847   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1848   if (sf) *sf = sfOverlap;
1849   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1850   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1851   PetscFunctionReturn(0);
1852 }
1853 
1854 /*@C
1855   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1856   root process of the original's communicator.
1857 
1858   Input Parameter:
1859 . dm - the original DMPlex object
1860 
1861   Output Parameter:
1862 . gatherMesh - the gathered DM object, or NULL
1863 
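  Notes:
  A minimal usage sketch; the caller owns gatherMesh when it is returned non-NULL:
$    DM gatherMesh = NULL;
$
$    ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
$    if (gatherMesh) {ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);}
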
1864   Level: intermediate
1865 
1866 .keywords: mesh
1867 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1868 @*/
1869 PetscErrorCode DMPlexGetGatherDM(DM dm, DM *gatherMesh)
1870 {
1871   MPI_Comm       comm;
1872   PetscMPIInt    size;
1873   PetscPartitioner oldPart, gatherPart;
1874   PetscErrorCode ierr;
1875 
1876   PetscFunctionBegin;
1877   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1878   PetscValidPointer(gatherMesh,2);
1879   *gatherMesh = NULL;
1880   comm = PetscObjectComm((PetscObject)dm);
1881   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1882   if (size == 1) PetscFunctionReturn(0);
1883   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1884   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1885   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1886   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1887   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1888   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1889   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1890   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1891   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1892   PetscFunctionReturn(0);
1893 }
1894 
1895 /*@C
1896   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1897 
1898   Input Parameter:
1899 . dm - the original DMPlex object
1900 
1901   Output Parameter:
1902 . redundantMesh - the redundant DM object, or NULL
1903 
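  Notes:
  A minimal usage sketch; on a single process the same dm is returned with an extra reference, so the destroy below remains correct:
$    DM redundantMesh = NULL;
$
$    ierr = DMPlexGetRedundantDM(dm, &redundantMesh);CHKERRQ(ierr);
$    if (redundantMesh) {ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);}
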
1904   Level: intermediate
1905 
1906 .keywords: mesh
1907 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1908 @*/
1909 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM *redundantMesh)
1910 {
1911   MPI_Comm       comm;
1912   PetscMPIInt    size, rank;
1913   PetscInt       pStart, pEnd, p;
1914   PetscInt       numPoints = -1;
1915   PetscSF        migrationSF, sfPoint;
1916   DM             gatherDM, dmCoord;
1917   PetscSFNode    *points;
1918   PetscErrorCode ierr;
1919 
1920   PetscFunctionBegin;
1921   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1922   PetscValidPointer(redundantMesh,2);
1923   *redundantMesh = NULL;
1924   comm = PetscObjectComm((PetscObject)dm);
1925   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1926   if (size == 1) {
1927     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1928     *redundantMesh = dm;
1929     PetscFunctionReturn(0);
1930   }
1931   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1932   if (!gatherDM) PetscFunctionReturn(0);
1933   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1934   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1935   numPoints = pEnd - pStart;
1936   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1937   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1938   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
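  /* Build a migration SF in which every process receives every point of the gathered mesh from rank 0 */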
1939   for (p = 0; p < numPoints; p++) {
1940     points[p].index = p;
1941     points[p].rank  = 0;
1942   }
1943   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1944   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1945   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1946   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1947   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1948   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1949   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1950   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1951   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1952   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1953   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1954   PetscFunctionReturn(0);
1955 }
1956