#include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
#include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/

/*@C
  DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback

  Input Parameters:
+ dm      - The DM object
. user    - The user callback, may be NULL (to clear the callback)
- ctx     - context for callback evaluation, may be NULL

  Level: advanced

  Notes:
     The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.

     Any setting here overrides other configuration of DMPlex adjacency determination.

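  Example:
  A minimal sketch of registering a callback; the callback and context names (MyAdjacency, myctx) are hypothetical:
.vb
  static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
  {
    /* On entry *adjSize is the available space in adj[]; on exit it is the number of adjacent points written */
    *adjSize = 0;
    return 0;
  }

  ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, &myctx);CHKERRQ(ierr);
.ve
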
.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
@*/
PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useradjacency = user;
  mesh->useradjacencyctx = ctx;
  PetscFunctionReturn(0);
}

/*@C
  DMPlexGetAdjacencyUser - Get the user-defined adjacency callback

  Input Parameter:
. dm      - The DM object

  Output Parameters:
+ user    - The user callback
- ctx     - context for callback evaluation

  Level: advanced

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

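  Example:
  A sketch of selecting the FVM-style adjacency from the table above; the companion call is shown for completeness:
.vb
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
.ve
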
.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  PetscDS        prob;
  PetscBool      useClosure;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  PetscDS        prob;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm      - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  PetscDS        prob;
  PetscBool      useCone;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
    ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  PetscDS        prob;
  PetscInt       Nf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
  ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
  if (!Nf) {
    ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr);
  } else {
    ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm      - The DM object
- useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt  point   = !c ? p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
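      /* Linear-scan insert: the comma expression in the loop bound appends support[s] to adj[] exactly when q runs past numAdj without finding a duplicate */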
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt  point = !s ? p : support[s-1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
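  /* If no array was supplied, bound the adjacency size by a geometric series in the maximum cone and support sizes over the mesh depth, then allocate */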
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
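  /* Substitute anchors: remove each constrained point from the adjacency list and append its anchor points instead, keeping the list duplicate-free */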
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The DM object
. p  - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj - The adjacent points

  Level: advanced

  Notes:
    The user must PetscFree the adj array if it was not passed in.

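  Example:
  A sketch of the allocate-on-NULL calling convention, with the caller freeing the returned array:
.vb
  PetscInt *adj = NULL;
  PetscInt  adjSize = PETSC_DETERMINE;

  ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
  /* use adj[0], ..., adj[adjSize-1] */
  ierr = PetscFree(adj);CHKERRQ(ierr);
.ve
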
.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  PetscBool      useCone, useClosure, useAnchors;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges */
  PetscBTClear(neighbors, rank);
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided information for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

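  Example:
  A sketch of the typical pairing with DMPlexCreateOverlap(); the sections are created empty and filled by this routine:
.vb
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;
  DMLabel      ovLabel;

  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
.ve
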
.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateOverlap - Compute the overlap points and the processes that should receive them, expressed as point/rank pairings in a label

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
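  /* Compute per-stratum shifts so that received points are appended after the local points of each stratum, preserving the chart ordering of cells first, then vertices, then the remaining strata */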
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF: build an SF that describes the full overlap migration */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.

  Input Parameters:
+ dm          - The DM
- sf          - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
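  /* Compute shifts over the doubled strata (even entries mark hybrid points, odd entries regular points): cells come first, then vertices, then the remaining strata, with hybrid points following the regular points of each stratum */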
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

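  Example:
  A sketch mirroring the use in DMPlexDistributeCoordinates() below; originalSection, originalVec, newSection, and migrationSF are assumed to exist already:
.vb
  ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
.ve
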
.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

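  Example:
  A sketch with a PetscInt payload; the routine allocates newData, which the caller must eventually free:
.vb
  PetscInt *newData = NULL;

  ierr = DMPlexDistributeData(dm, pointSF, originalSection, MPIU_INT, originalData, newSection, (void **) &newData);CHKERRQ(ierr);
.ve
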
.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Here we are assuming that process 0 always has everything */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;

    if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);}
    /* Skip "depth" because it is recreated */
    if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);}
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1251 
1252 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1253 {
1254   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1255   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1256   PetscBool      *isHybrid, *isHybridParallel;
1257   PetscInt        dim, depth, d;
1258   PetscInt        pStart, pEnd, pStartP, pEndP;
1259   PetscErrorCode  ierr;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1263   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1264 
1265   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1266   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1267   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1268   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1269   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
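  /* On the source mesh, flag every point at or beyond the hybrid bound of its stratum */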
1270   for (d = 0; d <= depth; d++) {
1271     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1272 
1273     if (hybridMax >= 0) {
1274       PetscInt sStart, sEnd, p;
1275 
1276       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1277       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1278     }
1279   }
1280   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1281   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
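  /* After migrating the flags, the first flagged point in each stratum of the parallel mesh becomes its hybrid bound */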
1282   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1283   for (d = 0; d <= depth; d++) {
1284     PetscInt sStart, sEnd, p, dd;
1285 
1286     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1287     dd = (depth == 1 && d == 1) ? dim : d;
1288     for (p = sStart; p < sEnd; p++) {
1289       if (isHybridParallel[p-pStartP]) {
1290         pmesh->hybridPointMax[dd] = p;
1291         break;
1292       }
1293     }
1294   }
1295   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1296   PetscFunctionReturn(0);
1297 }
1298 
1299 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1300 {
1301   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1302   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1303   MPI_Comm        comm;
1304   DM              refTree;
1305   PetscSection    origParentSection, newParentSection;
1306   PetscInt        *origParents, *origChildIDs;
1307   PetscBool       flg;
1308   PetscErrorCode  ierr;
1309 
1310   PetscFunctionBegin;
1311   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1312   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1313   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1314 
1315   /* Set up tree */
1316   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1317   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1318   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1319   if (origParentSection) {
1320     PetscInt        pStart, pEnd;
1321     PetscInt        *newParents, *newChildIDs, *globParents;
1322     PetscInt        *remoteOffsetsParents, newParentSize;
1323     PetscSF         parentSF;
1324 
1325     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1326     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1327     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1328     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1329     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1330     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1331     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1332     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1333     if (original) {
1334       PetscInt numParents;
1335 
1336       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1337       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1338       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1339     }
1340     else {
1341       globParents = origParents;
1342     }
1343     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1344     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1345     if (original) {
1346       ierr = PetscFree(globParents);CHKERRQ(ierr);
1347     }
1348     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1349     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1350     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
1352     {
1353       PetscInt  p;
1354       PetscBool valid = PETSC_TRUE;
1355       for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1357       }
1358       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1359     }
1360 #endif
1361     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1362     if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section:\n");CHKERRQ(ierr);
1366       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1367       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1368     }
1369     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1370     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1371     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1372     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1373   }
1374   pmesh->useAnchors = mesh->useAnchors;
1375   PetscFunctionReturn(0);
1376 }
1377 
1378 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1379 {
1380   PetscMPIInt            rank, size;
1381   MPI_Comm               comm;
1382   PetscErrorCode         ierr;
1383 
1384   PetscFunctionBegin;
1385   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1386   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1387 
1388   /* Create point SF for parallel mesh */
1389   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1390   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1391   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1392   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1393   {
1394     const PetscInt *leaves;
1395     PetscSFNode    *remotePoints, *rowners, *lowners;
1396     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1397     PetscInt        pStart, pEnd;
1398 
1399     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1400     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1401     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1402     for (p=0; p<numRoots; p++) {
1403       rowners[p].rank  = -1;
1404       rowners[p].index = -1;
1405     }
1406     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1407     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1408     for (p = 0; p < numLeaves; ++p) {
1409       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1410         lowners[p].rank  = rank;
1411         lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1413         lowners[p].rank  = -2;
1414         lowners[p].index = -2;
1415       }
1416     }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1418       rowners[p].rank  = -3;
1419       rowners[p].index = -3;
1420     }
1421     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1422     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1423     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1424     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1425     for (p = 0; p < numLeaves; ++p) {
1426       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1427       if (lowners[p].rank != rank) ++numGhostPoints;
1428     }
1429     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1430     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1431     for (p = 0, gp = 0; p < numLeaves; ++p) {
1432       if (lowners[p].rank != rank) {
1433         ghostPoints[gp]        = leaves ? leaves[p] : p;
1434         remotePoints[gp].rank  = lowners[p].rank;
1435         remotePoints[gp].index = lowners[p].index;
1436         ++gp;
1437       }
1438     }
1439     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1440     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1441     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1442   }
1443   {
1444     PetscBool useCone, useClosure, useAnchors;
1445 
1446     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1447     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1448     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1449     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1450     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1451     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1452   }
1453   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1454   PetscFunctionReturn(0);
1455 }
1456 
1457 /*@
1458   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1459 
1460   Input Parameters:
1461 + dm - The DMPlex object
1462 - flg - Balance the partition?
1463 
1464   Level: intermediate
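
  Notes:
  A minimal usage sketch (dmDist is an illustrative name and error handling is abbreviated); set the flag before distributing:
$    ierr = DMPlexSetPartitionBalance(dm, PETSC_TRUE);CHKERRQ(ierr);
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);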
1465 
1466 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1467 @*/
1468 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1469 {
1470   DM_Plex *mesh = (DM_Plex *)dm->data;
1471 
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->partitionBalance = flg;
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 /*@
1478   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1479 
  Input Parameter:
. dm - The DMPlex object

  Output Parameter:
. flg - Balance the partition?
1485 
1486   Level: intermediate
1487 
1488 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1489 @*/
1490 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1491 {
1492   DM_Plex *mesh = (DM_Plex *)dm->data;
1493 
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1496   PetscValidIntPointer(flg, 2);
1497   *flg = mesh->partitionBalance;
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1511 
1512   Level: developer
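
  Notes:
  A sketch of the intended call sequence, mirroring its use in DMPlexDistribute() (names are illustrative):
$    ierr = DMPlexCreatePointSF(dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
$    ierr = DMSetPointSF(dmParallel, sfPoint);CHKERRQ(ierr);
$    ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);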
1513 
1514 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1515 @*/
1516 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1517 {
1518   PetscMPIInt        rank, size;
1519   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1520   PetscInt          *pointLocal;
1521   const PetscInt    *leaves;
1522   const PetscSFNode *roots;
1523   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1524   Vec                shifts;
1525   const PetscInt     numShifts = 13759;
1526   const PetscScalar *shift = NULL;
1527   const PetscBool    shiftDebug = PETSC_FALSE;
1528   PetscBool          balance;
1529   PetscErrorCode     ierr;
1530 
1531   PetscFunctionBegin;
1532   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1533   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1534   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1535 
1536   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1537   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1538   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1539   if (ownership) {
1540     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1541     if (balance) {
1542       PetscRandom r;
1543 
1544       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1545       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1546       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1547       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1548       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1549       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1550       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1551       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1552     }
1553 
1554     /* Point ownership vote: Process with highest rank owns shared points */
1555     for (p = 0; p < nleaves; ++p) {
1556       if (shiftDebug) {
1557         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1558       }
1559       /* Either put in a bid or we know we own it */
1560       leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1561       leafNodes[p].index = p;
1562     }
1563     for (p = 0; p < nroots; p++) {
1564       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1565       rootNodes[p].rank  = -3;
1566       rootNodes[p].index = -3;
1567     }
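    /* MPI_MAXLOC keeps the pair with the largest rank entry; on ties the smallest index wins */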
1568     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1569     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1570     if (balance) {
      /* We've voted, now we need to get the rank. When we're balancing the partition, the "rank" in rootNodes is not
       * the true rank but rather (rank + random)%size. So we do another reduction, voting the same way, but sending the
       * rank instead of the index. */
1574       PetscSFNode *rootRanks = NULL;
1575       ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
1576       for (p = 0; p < nroots; p++) {
1577         rootRanks[p].rank = -3;
1578         rootRanks[p].index = -3;
1579       }
1580       for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1581       ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1582       ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1583       for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1584       ierr = PetscFree(rootRanks);CHKERRQ(ierr);
1585     }
1586   } else {
1587     for (p = 0; p < nroots; p++) {
1588       rootNodes[p].index = -1;
1589       rootNodes[p].rank = rank;
    }
1591     for (p = 0; p < nleaves; p++) {
1592       /* Write new local id into old location */
1593       if (roots[p].rank == rank) {
1594         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1595       }
1596     }
1597   }
1598   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1599   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1600 
1601   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1602     if (leafNodes[p].rank != rank) npointLeaves++;
1603   }
1604   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1605   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1606   for (idx = 0, p = 0; p < nleaves; p++) {
1607     if (leafNodes[p].rank != rank) {
1608       pointLocal[idx] = p;
1609       pointRemote[idx] = leafNodes[p];
1610       idx++;
1611     }
1612   }
1613   if (shift) {
1614     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1615     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1616   }
1617   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1618   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1619   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1620   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1621   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1622   PetscFunctionReturn(0);
1623 }
1624 
1625 /*@C
  DMPlexMigrate - Migrates internal DM data over the supplied star forest

  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object
1634 
1635   Level: intermediate
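
  Notes:
  The target DM must exist before the call; a hedged sketch mirroring the use in DMPlexDistribute() (names are illustrative):
$    ierr = DMPlexCreate(comm, &targetDM);CHKERRQ(ierr);
$    ierr = DMPlexMigrate(dm, sf, targetDM);CHKERRQ(ierr);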
1636 
1637 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1638 @*/
1639 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1640 {
1641   MPI_Comm               comm;
1642   PetscInt               dim, cdim, nroots;
1643   PetscSF                sfPoint;
1644   ISLocalToGlobalMapping ltogMigration;
1645   ISLocalToGlobalMapping ltogOriginal = NULL;
1646   PetscBool              flg;
1647   PetscErrorCode         ierr;
1648 
1649   PetscFunctionBegin;
1650   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1651   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1652   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1653   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1654   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1655   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1656   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1657 
1658   /* Check for a one-to-all distribution pattern */
1659   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1660   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1661   if (nroots >= 0) {
1662     IS                     isOriginal;
1663     PetscInt               n, size, nleaves;
1664     PetscInt              *numbering_orig, *numbering_new;
1665     /* Get the original point numbering */
1666     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1667     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1668     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1669     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1670     /* Convert to positive global numbers */
1671     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1672     /* Derive the new local-to-global mapping from the old one */
1673     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1674     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1675     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1676     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1677     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1679     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1680   } else {
1681     /* One-to-all distribution pattern: We can derive LToG from SF */
1682     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1683   }
1684   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1685   if (flg) {
1686     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1687     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1688   }
1689   /* Migrate DM data to target DM */
1690   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1691   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1692   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1693   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1694   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1695   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1696   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1697   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1698   PetscFunctionReturn(0);
1699 }
1700 
1701 /*@C
1702   DMPlexDistribute - Distributes the mesh and any associated sections.
1703 
  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, or 0 for no overlap

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object
1713 
1714   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1715 
1716   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1717   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1718   representation on the mesh.
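
  A typical call sequence is (a sketch; dmDist is an illustrative name, and error handling is abbreviated):
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }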
1719 
1720   Level: intermediate
1721 
1722 .keywords: mesh, elements
1723 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1724 @*/
1725 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1726 {
1727   MPI_Comm               comm;
1728   PetscPartitioner       partitioner;
1729   IS                     cellPart;
1730   PetscSection           cellPartSection;
1731   DM                     dmCoord;
1732   DMLabel                lblPartition, lblMigration;
1733   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1734   PetscBool              flg, balance;
1735   PetscMPIInt            rank, size, p;
1736   PetscErrorCode         ierr;
1737 
1738   PetscFunctionBegin;
1739   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1740   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1741   if (sf) PetscValidPointer(sf,3);
1742   PetscValidPointer(dmParallel,4);
1743 
1744   if (sf) *sf = NULL;
1745   *dmParallel = NULL;
1746   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1747   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1748   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1749   if (size == 1) PetscFunctionReturn(0);
1750 
1751   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1752   /* Create cell partition */
1753   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1754   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1755   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1756   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1757   {
1758     /* Convert partition to DMLabel */
    PetscInt proc, pStart, pEnd, npoints, poffset, pt;
1760     const PetscInt *points;
1761     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1762     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1763     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1764     for (proc = pStart; proc < pEnd; proc++) {
1765       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1766       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
      for (pt = poffset; pt < poffset+npoints; pt++) { /* Use a PetscInt counter: p is PetscMPIInt, and point offsets may exceed its range */
        ierr = DMLabelSetValue(lblPartition, points[pt], proc);CHKERRQ(ierr);
1769       }
1770     }
1771     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1772   }
1773   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1774   {
1775     /* Build a global process SF */
1776     PetscSFNode *remoteProc;
1777     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1778     for (p = 0; p < size; ++p) {
1779       remoteProc[p].rank  = p;
1780       remoteProc[p].index = rank;
1781     }
1782     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1783     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1784     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1785   }
1786   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1787   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1788   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1789   /* Stratify the SF in case we are migrating an already parallel plex */
1790   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1791   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1792   sfMigration = sfStratified;
1793   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1794   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1795   if (flg) {
1796     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1797     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1798   }
1799 
1800   /* Create non-overlapping parallel DM and migrate internal data */
1801   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1802   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1803   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1804 
1805   /* Build the point SF without overlap */
1806   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1807   ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
1808   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1809   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1810   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1811   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1812   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1813 
1814   if (overlap > 0) {
1815     DM                 dmOverlap;
1816     PetscInt           nroots, nleaves;
1817     PetscSFNode       *newRemote;
1818     const PetscSFNode *oldRemote;
1819     PetscSF            sfOverlap, sfOverlapPoint;
1820     /* Add the partition overlap to the distributed DM */
1821     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1822     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1823     *dmParallel = dmOverlap;
1824     if (flg) {
1825       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1826       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1827     }
1828 
1829     /* Re-map the migration SF to establish the full migration pattern */
1830     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1831     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1832     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1833     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1834     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1835     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1836     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1837     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1838     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1839     sfMigration = sfOverlapPoint;
1840   }
1841   /* Cleanup Partition */
1842   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1843   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1844   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1845   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1846   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1847   /* Copy BC */
1848   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1849   /* Create sfNatural */
1850   if (dm->useNatural) {
1851     PetscSection section;
1852 
1853     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1854     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1855     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1856   }
1857   /* Cleanup */
1858   if (sf) {*sf = sfMigration;}
1859   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1860   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1861   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1862   PetscFunctionReturn(0);
1863 }
1864 
1865 /*@C
1866   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1867 
  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL
1877 
1878   Note: If the mesh was not distributed, the return value is NULL.
1879 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1881   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1882   representation on the mesh.
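
  A sketch of adding one level of overlap to an already distributed mesh (dmOver is an illustrative name):
$    ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);CHKERRQ(ierr);
$    if (dmOver) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmOver;
$    }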
1883 
1884   Level: intermediate
1885 
1886 .keywords: mesh, elements
1887 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1888 @*/
1889 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1890 {
1891   MPI_Comm               comm;
1892   PetscMPIInt            size, rank;
1893   PetscSection           rootSection, leafSection;
1894   IS                     rootrank, leafrank;
1895   DM                     dmCoord;
1896   DMLabel                lblOverlap;
1897   PetscSF                sfOverlap, sfStratified, sfPoint;
1898   PetscErrorCode         ierr;
1899 
1900   PetscFunctionBegin;
1901   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1902   if (sf) PetscValidPointer(sf, 3);
1903   PetscValidPointer(dmOverlap, 4);
1904 
1905   if (sf) *sf = NULL;
1906   *dmOverlap  = NULL;
1907   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1908   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1909   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1910   if (size == 1) PetscFunctionReturn(0);
1911 
1912   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1913   /* Compute point overlap with neighbouring processes on the distributed DM */
1914   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1915   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1916   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1917   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1918   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1919   /* Convert overlap label to stratified migration SF */
1920   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1921   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1922   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1923   sfOverlap = sfStratified;
1924   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1925   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1926 
1927   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1928   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1929   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1930   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1931   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1932 
1933   /* Build the overlapping DM */
1934   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1935   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1936   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1937   /* Build the new point SF */
1938   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1939   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1940   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1941   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1942   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1943   /* Cleanup overlap partition */
1944   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1945   if (sf) *sf = sfOverlap;
1946   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1947   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1948   PetscFunctionReturn(0);
1949 }
1950 
1951 /*@C
1952   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1953   root process of the original's communicator.
1954 
  Input Parameter:
. dm - the original DMPlex object

  Output Parameter:
. gatherMesh - the gathered DM object, or NULL
1960 
1961   Level: intermediate
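
  Notes:
  On a single process this returns NULL immediately; a minimal usage sketch (destroy the gathered mesh when done):
$    ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
$    if (gatherMesh) {ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);}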
1962 
1963 .keywords: mesh
1964 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1965 @*/
1966 PetscErrorCode DMPlexGetGatherDM(DM dm, DM *gatherMesh)
1967 {
1968   MPI_Comm       comm;
1969   PetscMPIInt    size;
1970   PetscPartitioner oldPart, gatherPart;
1971   PetscErrorCode ierr;
1972 
1973   PetscFunctionBegin;
1974   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1975   PetscValidPointer(gatherMesh,2);
1976   *gatherMesh = NULL;
1977   comm = PetscObjectComm((PetscObject)dm);
1978   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1979   if (size == 1) PetscFunctionReturn(0);
1980   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1981   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1982   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1983   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1984   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1985   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1986   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1987   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1988   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1989   PetscFunctionReturn(0);
1990 }
1991 
1992 /*@C
1993   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1994 
  Input Parameter:
. dm - the original DMPlex object

  Output Parameter:
. redundantMesh - the redundant DM object, or NULL
2000 
2001   Level: intermediate
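
  Notes:
  On a single process the original DM is returned with an added reference; a minimal usage sketch:
$    ierr = DMPlexGetRedundantDM(dm, &redundantMesh);CHKERRQ(ierr);
$    if (redundantMesh) {ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);}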
2002 
2003 .keywords: mesh
2004 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
2005 @*/
2006 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM *redundantMesh)
2007 {
2008   MPI_Comm       comm;
2009   PetscMPIInt    size, rank;
2010   PetscInt       pStart, pEnd, p;
2011   PetscInt       numPoints = -1;
2012   PetscSF        migrationSF, sfPoint;
2013   DM             gatherDM, dmCoord;
2014   PetscSFNode    *points;
2015   PetscErrorCode ierr;
2016 
2017   PetscFunctionBegin;
2018   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2019   PetscValidPointer(redundantMesh,2);
2020   *redundantMesh = NULL;
2021   comm = PetscObjectComm((PetscObject)dm);
2022   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2023   if (size == 1) {
2024     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
2025     *redundantMesh = dm;
2026     PetscFunctionReturn(0);
2027   }
2028   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
2029   if (!gatherDM) PetscFunctionReturn(0);
2030   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2031   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
2032   numPoints = pEnd - pStart;
2033   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
2034   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
2035   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
2036   for (p = 0; p < numPoints; p++) {
2037     points[p].index = p;
2038     points[p].rank  = 0;
2039   }
2040   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
2041   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
2042   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
2043   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
2044   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
2045   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
2046   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
2047   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
2048   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
2049   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
2050   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
2051   PetscFunctionReturn(0);
2052 }
2053