xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 00d931fe9835bef04c3bcd2a9a1bf118d64cc4c2)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
/* Compute the points adjacent to p under the adjacency rule selected by
   (useCone, useTransitiveClosure); optionally replace constrained (anchored)
   points by their anchor points.  If *adj is NULL, an array is allocated here
   (the caller must PetscFree it); if *adjSize is negative it is set to the
   allocated capacity.  On output *adjSize is the number of adjacent points. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static cache of the capacity computed on the last allocating
     call; it is stale when *adj is passed in preallocated and is not
     thread-safe -- confirm this matches the intended usage pattern */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Size the array by a worst-case geometric-series bound on the number of
       points reachable through depth levels of cones/supports, scaled by the
       maximum number of anchors, but never larger than the whole chart */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency rule; each helper writes at most maxAdjSize
     points and returns the actual count in *adjSize */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    /* Compact the list in place: remove each anchored point and append its
       anchors (deduplicated).  i is only advanced when orig[i] is kept, since
       removal shifts a new point into slot i. */
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail down one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor not already in the list (comma-operator idiom:
           the loop condition appends anchors[aOff+s] when q reaches numAdj) */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of leaf ranks for each root point
. rootRanks       - The rank of each edge into a root point
. leafRankSection - Section giving the number of ranks sharing each leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

  Note: The rank sections and ISs are typically produced by DMPlexDistributeOwnership().

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;  /* one bit per process: is it a neighbor? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: self is never a neighbor */
  PetscBTClear(neighbors, rank);
  for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* Each neighbor becomes a leaf pointing at (rank, neighbor) on the remote side */
  for(proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
467 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree gives how many
     remote processes have a leaf for that point */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends its own rank, so each
     root receives one rank per incoming edge */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves: broadcast the per-root rank lists back
     out so every leaf knows which processes share it */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
524 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute a label marking the points this process must send to other ranks to form the requested partition overlap

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

  Note: The rank sections and ISs are typically produced by DMPlexDistributeOwnership().

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point; mark each leaf's adjacency for
     the owning rank */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is reused across iterations; DMPlexGetAdjacency allocates it once */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels by expanding the label through adjacency */
  for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make process SF and invert sender to receiver label */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
639 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build the migration SF used to add overlap points to this process: the leaf
   space contains every currently-owned point mapped to itself (re-rooted at
   the remote owner for already-shared points) plus the incoming overlap
   points, all placed so that depth strata remain contiguous after migration. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     compute the depth of every local point and ship it to the receivers */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count recevied points in each stratum and compute the internal strata shift */
  /* NOTE(review): the shift accumulation below appears to assume the default
     Plex stratum ordering (cells, vertices, then intermediate strata) --
     confirm against DMPlexStratify */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the insertion cursor for incoming points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Re-root the shared point at its remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "DMPlexStratifyMigrationSF"
739 /*@
740   DMPlexStratifyMigrationSF - Add partition overlap to a distributed non-overlapping DM.
741 
742   Input Parameter:
743 + dm          - The DM
744 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
745 
746   Output Parameter:
747 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
748 
749   Level: developer
750 
751 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
752 @*/
753 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
754 {
755   MPI_Comm           comm;
756   PetscMPIInt        rank, numProcs;
757   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
758   PetscInt          *pointDepths, *remoteDepths, *ilocal;
759   PetscInt          *depthRecv, *depthShift, *depthIdx;
760   const PetscSFNode *iremote;
761   PetscErrorCode     ierr;
762 
763   PetscFunctionBegin;
764   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
765   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
766   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
767   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
768   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
769   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
770   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
771 
772   /* Before building the migration SF we need to know the new stratum offsets */
773   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
774   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
775   for (d = 0; d < depth+1; ++d) {
776     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
777     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
778   }
779   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
780   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
781   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
782   /* Count recevied points in each stratum and compute the internal strata shift */
783   ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
784   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
785   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
786   depthShift[depth] = 0;
787   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
788   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
789   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
790   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
791   /* Derive a new local permutation based on stratified indices */
792   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
793   for (p = 0; p < nleaves; ++p) {
794     const PetscInt dep = remoteDepths[p];
795 
796     ilocal[p] = depthShift[dep] + depthIdx[dep];
797     depthIdx[dep]++;
798   }
799   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
800   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
801   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
802   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
803   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "DMPlexDistributeField"
809 /*@
810   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
811 
812   Collective on DM
813 
814   Input Parameters:
815 + dm - The DMPlex object
816 . pointSF - The PetscSF describing the communication pattern
817 . originalSection - The PetscSection for existing data layout
818 - originalVec - The existing data
819 
820   Output Parameters:
821 + newSection - The PetscSF describing the new data layout
822 - newVec - The new data
823 
824   Level: developer
825 
826 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
827 @*/
828 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
829 {
830   PetscSF        fieldSF;
831   PetscInt      *remoteOffsets, fieldSize;
832   PetscScalar   *originalValues, *newValues;
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
837   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
838 
839   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
840   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
841   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
842 
843   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
844   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
845   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
846   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
847   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
848   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
849   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
850   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
851   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
852   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "DMPlexDistributeFieldIS"
858 /*@
859   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
860 
861   Collective on DM
862 
863   Input Parameters:
864 + dm - The DMPlex object
865 . pointSF - The PetscSF describing the communication pattern
866 . originalSection - The PetscSection for existing data layout
867 - originalIS - The existing data
868 
869   Output Parameters:
870 + newSection - The PetscSF describing the new data layout
871 - newIS - The new data
872 
873   Level: developer
874 
875 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
876 @*/
877 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
878 {
879   PetscSF         fieldSF;
880   PetscInt       *newValues, *remoteOffsets, fieldSize;
881   const PetscInt *originalValues;
882   PetscErrorCode  ierr;
883 
884   PetscFunctionBegin;
885   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
886   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
887 
888   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
889   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
890 
891   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
892   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
893   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
894   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
895   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
896   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
897   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
898   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
899   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
900   PetscFunctionReturn(0);
901 }
902 
903 #undef __FUNCT__
904 #define __FUNCT__ "DMPlexDistributeData"
905 /*@
906   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
907 
908   Collective on DM
909 
910   Input Parameters:
911 + dm - The DMPlex object
912 . pointSF - The PetscSF describing the communication pattern
913 . originalSection - The PetscSection for existing data layout
914 . datatype - The type of data
915 - originalData - The existing data
916 
917   Output Parameters:
918 + newSection - The PetscSection describing the new data layout
919 - newData - The new data
920 
921   Level: developer
922 
923 .seealso: DMPlexDistribute(), DMPlexDistributeField()
924 @*/
925 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
926 {
927   PetscSF        fieldSF;
928   PetscInt      *remoteOffsets, fieldSize;
929   PetscMPIInt    dataSize;
930   PetscErrorCode ierr;
931 
932   PetscFunctionBegin;
933   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
934   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
935 
936   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
937   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
938   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
939 
940   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
941   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
942   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
943   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
944   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
945   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
946   PetscFunctionReturn(0);
947 }
948 
949 #undef __FUNCT__
950 #define __FUNCT__ "DMPlexDistributeCones"
951 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
952 {
953   DM_Plex               *mesh  = (DM_Plex*) dm->data;
954   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
955   MPI_Comm               comm;
956   PetscSF                coneSF;
957   PetscSection           originalConeSection, newConeSection;
958   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
959   PetscBool              flg;
960   PetscErrorCode         ierr;
961 
962   PetscFunctionBegin;
963   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
964   PetscValidPointer(dmParallel,4);
965   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
966 
967   /* Distribute cone section */
968   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
969   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
970   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
971   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
972   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
973   {
974     PetscInt pStart, pEnd, p;
975 
976     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
977     for (p = pStart; p < pEnd; ++p) {
978       PetscInt coneSize;
979       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
980       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
981     }
982   }
983   /* Communicate and renumber cones */
984   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
985   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
986   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
987   if (original) {
988     PetscInt numCones;
989 
990     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
991     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
992   }
993   else {
994     globCones = cones;
995   }
996   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
997   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
998   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
999   if (original) {
1000     ierr = PetscFree(globCones);CHKERRQ(ierr);
1001   }
1002   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1003   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1004 #if PETSC_USE_DEBUG
1005   {
1006     PetscInt  p;
1007     PetscBool valid = PETSC_TRUE;
1008     for (p = 0; p < newConesSize; ++p) {
1009       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1010     }
1011     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1012   }
1013 #endif
1014   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1015   if (flg) {
1016     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1017     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1018     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1019     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1020     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1021   }
1022   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1023   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1024   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1025   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1026   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1027   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1028   /* Create supports and stratify sieve */
1029   {
1030     PetscInt pStart, pEnd;
1031 
1032     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1033     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1034   }
1035   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1036   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1037   pmesh->useCone    = mesh->useCone;
1038   pmesh->useClosure = mesh->useClosure;
1039   PetscFunctionReturn(0);
1040 }
1041 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
/* Distribute the coordinate section and local coordinate vector of dm into
   dmParallel over the migration SF, and carry over periodicity information. */
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  /* A DM without local coordinates has nothing to distribute */
  if (originalCoordinates) {
    /* Keep the original vector's name on the distributed coordinates */
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    /* Move the coordinate values; dmParallel takes its own reference, so our
       local handle is destroyed below */
    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Copy the block size from the original vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Forward periodicity only when it was set on the original DM (L non-NULL) */
  ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "DMPlexDistributeLabels"
1081 /* Here we are assuming that process 0 always has everything */
1082 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1083 {
1084   DM_Plex         *mesh = (DM_Plex*) dm->data;
1085   MPI_Comm         comm;
1086   DMLabel          depth;
1087   PetscMPIInt      rank;
1088   PetscInt         numLabels, numLocalLabels, l;
1089   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1090   PetscObjectState depthState = -1;
1091   PetscErrorCode   ierr;
1092 
1093   PetscFunctionBegin;
1094   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1095   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1096   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1097   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1098   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1099 
1100   /* If the user has changed the depth label, communicate it instead */
1101   ierr = DMPlexGetDepthLabel(dm, &depth);CHKERRQ(ierr);
1102   if (depth) {ierr = DMLabelGetState(depth, &depthState);CHKERRQ(ierr);}
1103   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1104   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1105   if (sendDepth) {
1106     ierr = DMRemoveLabel(dmParallel, "depth", &depth);CHKERRQ(ierr);
1107     ierr = DMLabelDestroy(&depth);CHKERRQ(ierr);
1108   }
1109   /* Everyone must have either the same number of labels, or none */
1110   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1111   numLabels = numLocalLabels;
1112   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1113   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1114   for (l = numLabels-1; l >= 0; --l) {
1115     DMLabel     label = NULL, labelNew = NULL;
1116     PetscBool   isdepth;
1117 
1118     if (hasLabels) {
1119       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1120       /* Skip "depth" because it is recreated */
1121       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1122     }
1123     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1124     if (isdepth && !sendDepth) continue;
1125     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1126     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1127   }
1128   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1129   PetscFunctionReturn(0);
1130 }
1131 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
/* Recompute pmesh->hybridPointMax on the distributed DM from the serial
   values.  NOTE(review): like DMPlexDistributeLabels, this appears to assume
   the pre-distribution data is authoritative on rank 0 (MPI_Bcast from 0). */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure: copy the serial bounds, then take rank 0's values */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    /* pmax < 0 means this dimension has no hybrid points; leave it alone */
    if (pmax < 0) continue;
    /* Serial stratum bounds for dimension d (clamped to the actual depth),
       broadcast from rank 0 so all ranks compare against the same range */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points whose global number lies in this stratum at or beyond
       the serial hybrid bound, i.e. the locally owned hybrid points */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    /* Hybrid points are numbered last in each stratum (assumption carried from
       the serial layout), so the new local bound is pEnd - newmax */
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1174 
1175 #undef __FUNCT__
1176 #define __FUNCT__ "DMPlexDistributeSetupTree"
1177 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1178 {
1179   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1180   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1181   MPI_Comm        comm;
1182   DM              refTree;
1183   PetscSection    origParentSection, newParentSection;
1184   PetscInt        *origParents, *origChildIDs;
1185   PetscBool       flg;
1186   PetscErrorCode  ierr;
1187 
1188   PetscFunctionBegin;
1189   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1190   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1191   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1192 
1193   /* Set up tree */
1194   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1195   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1196   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1197   if (origParentSection) {
1198     PetscInt        pStart, pEnd;
1199     PetscInt        *newParents, *newChildIDs, *globParents;
1200     PetscInt        *remoteOffsetsParents, newParentSize;
1201     PetscSF         parentSF;
1202 
1203     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1204     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1205     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1206     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1207     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1208     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1209     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1210     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1211     if (original) {
1212       PetscInt numParents;
1213 
1214       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1215       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1216       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1217     }
1218     else {
1219       globParents = origParents;
1220     }
1221     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1222     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1223     if (original) {
1224       ierr = PetscFree(globParents);CHKERRQ(ierr);
1225     }
1226     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1227     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1228     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1229 #if PETSC_USE_DEBUG
1230     {
1231       PetscInt  p;
1232       PetscBool valid = PETSC_TRUE;
1233       for (p = 0; p < newParentSize; ++p) {
1234         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1235       }
1236       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1237     }
1238 #endif
1239     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1240     if (flg) {
1241       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1242       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1243       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1244       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1245       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1246     }
1247     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1248     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1249     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1250     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1251   }
1252   pmesh->useAnchors = mesh->useAnchors;
1253   PetscFunctionReturn(0);
1254 }
1255 
1256 #undef __FUNCT__
1257 #define __FUNCT__ "DMPlexDistributeSF"
1258 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1259 {
1260   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1261   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1262   PetscMPIInt            rank, numProcs;
1263   MPI_Comm               comm;
1264   PetscErrorCode         ierr;
1265 
1266   PetscFunctionBegin;
1267   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1268   PetscValidPointer(dmParallel,7);
1269 
1270   /* Create point SF for parallel mesh */
1271   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1272   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1273   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1274   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1275   {
1276     const PetscInt *leaves;
1277     PetscSFNode    *remotePoints, *rowners, *lowners;
1278     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1279     PetscInt        pStart, pEnd;
1280 
1281     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1282     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1283     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1284     for (p=0; p<numRoots; p++) {
1285       rowners[p].rank  = -1;
1286       rowners[p].index = -1;
1287     }
1288     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1289     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1290     for (p = 0; p < numLeaves; ++p) {
1291       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1292         lowners[p].rank  = rank;
1293         lowners[p].index = leaves ? leaves[p] : p;
1294       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1295         lowners[p].rank  = -2;
1296         lowners[p].index = -2;
1297       }
1298     }
1299     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1300       rowners[p].rank  = -3;
1301       rowners[p].index = -3;
1302     }
1303     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1304     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1305     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1306     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1307     for (p = 0; p < numLeaves; ++p) {
1308       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1309       if (lowners[p].rank != rank) ++numGhostPoints;
1310     }
1311     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1312     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1313     for (p = 0, gp = 0; p < numLeaves; ++p) {
1314       if (lowners[p].rank != rank) {
1315         ghostPoints[gp]        = leaves ? leaves[p] : p;
1316         remotePoints[gp].rank  = lowners[p].rank;
1317         remotePoints[gp].index = lowners[p].index;
1318         ++gp;
1319       }
1320     }
1321     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1322     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1323     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1324   }
1325   pmesh->useCone    = mesh->useCone;
1326   pmesh->useClosure = mesh->useClosure;
1327   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1328   PetscFunctionReturn(0);
1329 }
1330 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePointSF"
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: each root keeps its current rank as owner; index -1 marks roots
       whose copy did not remain on this rank */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves[p];
      }
    }
  }
  /* Tell every leaf who owns its point and where the owned copy lives */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves owned by another rank become ghost points in the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  /* pointLocal/pointRemote ownership transfers to the new SF */
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "DMPlexMigrate"
1412 /*@C
1413   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1414 
1415   Input Parameter:
1416 + dm       - The source DMPlex object
1417 . sf       - The star forest communication context describing the migration pattern
1418 
1419   Output Parameter:
1420 - targetDM - The target DMPlex object
1421 
1422   Level: intermediate
1423 
1424 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1425 @*/
1426 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1427 {
1428   MPI_Comm               comm;
1429   PetscInt               dim, nroots;
1430   PetscSF                sfPoint;
1431   ISLocalToGlobalMapping ltogMigration;
1432   ISLocalToGlobalMapping ltogOriginal = NULL;
1433   PetscBool              flg;
1434   PetscErrorCode         ierr;
1435 
1436   PetscFunctionBegin;
1437   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1438   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1439   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1440   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1441   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1442 
1443   /* Check for a one-to-all distribution pattern */
1444   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1445   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1446   if (nroots >= 0) {
1447     IS                     isOriginal;
1448     PetscInt               n, size, nleaves;
1449     PetscInt              *numbering_orig, *numbering_new;
1450     /* Get the original point numbering */
1451     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1452     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1453     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1454     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1455     /* Convert to positive global numbers */
1456     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1457     /* Derive the new local-to-global mapping from the old one */
1458     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1459     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1460     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1461     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1462     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1463     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1464     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1465   } else {
1466     /* One-to-all distribution pattern: We can derive LToG from SF */
1467     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1468   }
1469   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1470   if (flg) {
1471     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1472     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1473   }
1474   /* Migrate DM data to target DM */
1475   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1476   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1477   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1478   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1479   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1480   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1481   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1482   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 #undef __FUNCT__
1487 #define __FUNCT__ "DMPlexDistribute"
1488 /*@C
1489   DMPlexDistribute - Distributes the mesh and any associated sections.
1490 
1491   Not Collective
1492 
1493   Input Parameter:
1494 + dm  - The original DMPlex object
1495 - overlap - The overlap of partitions, 0 is the default
1496 
1497   Output Parameter:
1498 + sf - The PetscSF used for point distribution
1499 - parallelMesh - The distributed DMPlex object, or NULL
1500 
1501   Note: If the mesh was not distributed, the return value is NULL.
1502 
1503   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1504   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1505   representation on the mesh.
1506 
1507   Level: intermediate
1508 
1509 .keywords: mesh, elements
1510 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1511 @*/
1512 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1513 {
1514   MPI_Comm               comm;
1515   PetscPartitioner       partitioner;
1516   IS                     cellPart;
1517   PetscSection           cellPartSection;
1518   DM                     dmCoord;
1519   DMLabel                lblPartition, lblMigration;
1520   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1521   PetscBool              flg;
1522   PetscMPIInt            rank, numProcs, p;
1523   PetscErrorCode         ierr;
1524 
1525   PetscFunctionBegin;
1526   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1527   if (sf) PetscValidPointer(sf,4);
1528   PetscValidPointer(dmParallel,5);
1529 
1530   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1531   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1532   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1533   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1534 
1535   *dmParallel = NULL;
1536   if (numProcs == 1) PetscFunctionReturn(0);
1537 
1538   /* Create cell partition */
1539   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1540   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1541   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1542   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1543   {
1544     /* Convert partition to DMLabel */
1545     PetscInt proc, pStart, pEnd, npoints, poffset;
1546     const PetscInt *points;
1547     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1548     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1549     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1550     for (proc = pStart; proc < pEnd; proc++) {
1551       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1552       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1553       for (p = poffset; p < poffset+npoints; p++) {
1554         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1555       }
1556     }
1557     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1558   }
1559   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1560   {
1561     /* Build a global process SF */
1562     PetscSFNode *remoteProc;
1563     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1564     for (p = 0; p < numProcs; ++p) {
1565       remoteProc[p].rank  = p;
1566       remoteProc[p].index = rank;
1567     }
1568     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1569     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1570     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1571   }
1572   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1573   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1574   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1575   /* Stratify the SF in case we are migrating an already parallel plex */
1576   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1577   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1578   sfMigration = sfStratified;
1579   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1580   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1581   if (flg) {
1582     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1583     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1584   }
1585 
1586   /* Create non-overlapping parallel DM and migrate internal data */
1587   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1588   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1589   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1590 
1591   /* Build the point SF without overlap */
1592   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1593   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1594   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1595   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1596   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1597 
1598   if (overlap > 0) {
1599     DM                 dmOverlap;
1600     PetscInt           nroots, nleaves;
1601     PetscSFNode       *newRemote;
1602     const PetscSFNode *oldRemote;
1603     PetscSF            sfOverlap, sfOverlapPoint;
1604     /* Add the partition overlap to the distributed DM */
1605     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1606     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1607     *dmParallel = dmOverlap;
1608     if (flg) {
1609       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1610       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1611     }
1612 
1613     /* Re-map the migration SF to establish the full migration pattern */
1614     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1615     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1616     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1617     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1618     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1619     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1620     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1621     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1622     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1623     sfMigration = sfOverlapPoint;
1624   }
1625   /* Cleanup Partition */
1626   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1627   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1628   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1629   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1630   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1631   /* Copy BC */
1632   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1633   /* Create sfNatural */
1634   if (dm->useNatural) {
1635     PetscSection section;
1636 
1637     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1638     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1639   }
1640   /* Cleanup */
1641   if (sf) {*sf = sfMigration;}
1642   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1643   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1644   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1645   PetscFunctionReturn(0);
1646 }
1647 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOverlap"
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Not Collective

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* The ownership sections and rank ISs are only needed to build the overlap label */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF; no ownership vote here since the source DM already fixed ownership */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  /* The overlap SF is either handed to the caller or destroyed here */
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1731