xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 1b581b665160bc053f1db6f623a6eaab6b98fcae)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
. useAnchors - Flag to use the anchors.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
374 
375 #undef __FUNCT__
376 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
377 /*@
378   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
379 
380   Collective on DM
381 
  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of leaf ranks attached to each root point
. rootRanks       - The leaf ranks for each root point
. leafRankSection - Section giving the number of ranks sharing each leaf point
- leafRanks       - The ranks sharing each leaf point
385 
386   Output Parameters:
387 + processRanks - A list of process neighbors, or NULL
388 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
389 
390   Level: developer
391 
392 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393 @*/
394 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395 {
396   const PetscSFNode *remotePoints;
397   PetscInt          *localPointsNew;
398   PetscSFNode       *remotePointsNew;
399   const PetscInt    *nranks;
400   PetscInt          *ranksNew;
401   PetscBT            neighbors;
402   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403   PetscMPIInt        numProcs, proc, rank;
404   PetscErrorCode     ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
408   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
409   if (processRanks) {PetscValidPointer(processRanks, 3);}
410   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
411   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
412   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
413   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
414   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
415   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
416   /* Compute root-to-leaf process connectivity */
417   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
418   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
419   for (p = pStart; p < pEnd; ++p) {
420     PetscInt ndof, noff, n;
421 
422     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
423     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
424     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
425   }
426   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
427   /* Compute leaf-to-neighbor process connectivity */
428   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
429   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
430   for (p = pStart; p < pEnd; ++p) {
431     PetscInt ndof, noff, n;
432 
433     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
434     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
435     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
436   }
437   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
438   /* Compute leaf-to-root process connectivity */
439   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440   /* Calculate edges */
441   PetscBTClear(neighbors, rank);
442   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
446   for(proc = 0, n = 0; proc < numProcs; ++proc) {
447     if (PetscBTLookup(neighbors, proc)) {
448       ranksNew[n]              = proc;
449       localPointsNew[n]        = proc;
450       remotePointsNew[n].index = rank;
451       remotePointsNew[n].rank  = proc;
452       ++n;
453     }
454   }
455   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
456   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
457   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
458   if (sfProcess) {
459     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
460     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
461     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
462     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMPlexDistributeOwnership"
469 /*@
470   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
471 
472   Collective on DM
473 
474   Input Parameter:
475 . dm - The DM
476 
477   Output Parameters:
478 + rootSection - The number of leaves for a given root point
479 . rootrank    - The rank of each edge into the root point
480 . leafSection - The number of processes sharing a given leaf point
481 - leafrank    - The rank of each process sharing a leaf point
482 
483   Level: developer
484 
485 .seealso: DMPlexCreateOverlap()
486 @*/
487 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
488 {
489   MPI_Comm        comm;
490   PetscSF         sfPoint;
491   const PetscInt *rootdegree;
492   PetscInt       *myrank, *remoterank;
493   PetscInt        pStart, pEnd, p, nedges;
494   PetscMPIInt     rank;
495   PetscErrorCode  ierr;
496 
497   PetscFunctionBegin;
498   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
499   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
500   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
501   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
502   /* Compute number of leaves for each root */
503   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
504   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
506   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
507   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
508   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
509   /* Gather rank of each leaf to root */
510   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
511   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
512   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
513   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
514   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
516   ierr = PetscFree(myrank);CHKERRQ(ierr);
517   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
518   /* Distribute remote ranks to leaves */
519   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
520   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMPlexCreateOverlap"
526 /*@C
  DMPlexCreateOverlap - Compute a label indicating the points, with their destination ranks, that each process must send to its neighbors to build the requested overlap
528 
529   Collective on DM
530 
531   Input Parameters:
532 + dm          - The DM
533 . levels      - Number of overlap levels
534 . rootSection - The number of leaves for a given root point
535 . rootrank    - The rank of each edge into the root point
536 . leafSection - The number of processes sharing a given leaf point
537 - leafrank    - The rank of each process sharing a leaf point
538 
539   Output Parameters:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
541 
542   Level: developer
543 
544 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
545 @*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;  /* allocated by the first DMPlexGetAdjacency() call, reused thereafter, freed below */
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* Everything adjacent to a shared leaf is a candidate contribution for the root's owner */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          /* Skip ourselves: no need to send our own points to ourselves */
          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels by expanding the label through adjacency */
  for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    /* FVM-style adjacency does not automatically include closures, so close the label here */
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make process SF and invert sender to receiver label */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
638 
639 #undef __FUNCT__
640 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build a migration SF mapping all current local points plus the incoming overlap
   points (the leaves of overlapSF) to their source (point, rank) pairs, inserting the
   received points so that each depth stratum remains contiguous in the new numbering. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* NOTE(review): remoteDepths entries start at -1; if any leaf were not covered by the
     broadcast, depthRecv[remoteDepths[p]] below would index out of bounds -- this assumes
     overlapSF supplies a depth for every leaf. TODO confirm */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* NOTE(review): the shifts appear to place received points in the order cells (depth dim),
     vertices (depth 0), then the intermediate strata from high depth down -- verify this
     matches the interleaved numbering convention for the dimensions in use */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the running insertion cursor for received points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Redirect shared points to their true remote owner instead of ourselves */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "DMPlexStratifyMigrationSF"
738 /*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the resulting DM is stratified by point depth
740 
741   Input Parameter:
742 + dm          - The DM
743 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
744 
745   Output Parameter:
746 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
747 
748   Level: developer
749 
750 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
751 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  /* Take the global depth as the max over ranks; ranks that have points (ldepth >= 0) must agree with it */
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets:
     label every local (root) point with its depth and push the labels to the leaves */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
  }
  /* NOTE(review): a leaf left at -1 (not covered by the bcast) would index out of
     bounds below; this assumes every leaf has a root in sf -- confirm at call sites */
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  /* The shifts order the strata as: cells (depth), vertices (0), then the
     intermediate strata in decreasing depth (faces, then edges) */
  depthShift[depth] = 0;
  for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
  for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
  for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
  for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  /* Same roots and remote targets as the input SF, only the leaf ordering changes */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "DMPlexDistributeField"
808 /*@
809   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
810 
811   Collective on DM
812 
813   Input Parameters:
814 + dm - The DMPlex object
815 . pointSF - The PetscSF describing the communication pattern
816 . originalSection - The PetscSection for existing data layout
817 - originalVec - The existing data
818 
819   Output Parameters:
+ newSection - The PetscSection describing the new data layout
821 - newVec - The new data
822 
823   Level: developer
824 
825 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
826 @*/
827 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
828 {
829   PetscSF        fieldSF;
830   PetscInt      *remoteOffsets, fieldSize;
831   PetscScalar   *originalValues, *newValues;
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
836   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
837 
838   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
839   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
840   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
841 
842   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
843   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
844   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
845   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
846   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
847   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
848   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
849   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
850   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
851   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "DMPlexDistributeFieldIS"
857 /*@
858   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
859 
860   Collective on DM
861 
862   Input Parameters:
863 + dm - The DMPlex object
864 . pointSF - The PetscSF describing the communication pattern
865 . originalSection - The PetscSection for existing data layout
866 - originalIS - The existing data
867 
868   Output Parameters:
+ newSection - The PetscSection describing the new data layout
870 - newIS - The new data
871 
872   Level: developer
873 
874 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
875 @*/
876 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
877 {
878   PetscSF         fieldSF;
879   PetscInt       *newValues, *remoteOffsets, fieldSize;
880   const PetscInt *originalValues;
881   PetscErrorCode  ierr;
882 
883   PetscFunctionBegin;
884   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
885   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
886 
887   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
888   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
889 
890   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
891   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
892   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
893   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
894   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
895   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
896   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
897   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
898   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
899   PetscFunctionReturn(0);
900 }
901 
902 #undef __FUNCT__
903 #define __FUNCT__ "DMPlexDistributeData"
904 /*@
905   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
906 
907   Collective on DM
908 
909   Input Parameters:
910 + dm - The DMPlex object
911 . pointSF - The PetscSF describing the communication pattern
912 . originalSection - The PetscSection for existing data layout
913 . datatype - The type of data
914 - originalData - The existing data
915 
916   Output Parameters:
917 + newSection - The PetscSection describing the new data layout
918 - newData - The new data
919 
920   Level: developer
921 
922 .seealso: DMPlexDistribute(), DMPlexDistributeField()
923 @*/
924 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
925 {
926   PetscSF        fieldSF;
927   PetscInt      *remoteOffsets, fieldSize;
928   PetscMPIInt    dataSize;
929   PetscErrorCode ierr;
930 
931   PetscFunctionBegin;
932   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
933   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
934 
935   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
936   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
937   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
938 
939   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
940   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
941   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
942   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
943   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
944   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
945   PetscFunctionReturn(0);
946 }
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "DMPlexDistributeCones"
950 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
951 {
952   DM_Plex               *mesh  = (DM_Plex*) dm->data;
953   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
954   MPI_Comm               comm;
955   PetscSF                coneSF;
956   PetscSection           originalConeSection, newConeSection;
957   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
958   PetscBool              flg;
959   PetscErrorCode         ierr;
960 
961   PetscFunctionBegin;
962   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
963   PetscValidPointer(dmParallel,4);
964   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
965 
966   /* Distribute cone section */
967   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
968   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
969   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
970   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
971   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
972   {
973     PetscInt pStart, pEnd, p;
974 
975     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
976     for (p = pStart; p < pEnd; ++p) {
977       PetscInt coneSize;
978       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
979       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
980     }
981   }
982   /* Communicate and renumber cones */
983   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
984   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
985   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
986   if (original) {
987     PetscInt numCones;
988 
989     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
990     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
991   }
992   else {
993     globCones = cones;
994   }
995   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
996   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
997   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
998   if (original) {
999     ierr = PetscFree(globCones);CHKERRQ(ierr);
1000   }
1001   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1002   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1003 #if PETSC_USE_DEBUG
1004   {
1005     PetscInt  p;
1006     PetscBool valid = PETSC_TRUE;
1007     for (p = 0; p < newConesSize; ++p) {
1008       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1009     }
1010     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1011   }
1012 #endif
1013   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1014   if (flg) {
1015     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1016     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1017     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1018     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1019     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1020   }
1021   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1022   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1023   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1024   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1025   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1026   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1027   /* Create supports and stratify sieve */
1028   {
1029     PetscInt pStart, pEnd;
1030 
1031     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1032     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1033   }
1034   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1035   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1036   pmesh->useCone    = mesh->useCone;
1037   pmesh->useClosure = mesh->useClosure;
1038   PetscFunctionReturn(0);
1039 }
1040 
1041 #undef __FUNCT__
1042 #define __FUNCT__ "DMPlexDistributeCoordinates"
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  /* A DM may have no local coordinates; only migrate when they exist */
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Carry the original coordinate vector's name over to the new one */
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    /* Move both the coordinate layout and the coordinate values over the migration SF */
    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size (spatial dimension of the coordinates) */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Copy periodicity information (when present) to the redistributed DM */
  ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1077 
1078 #undef __FUNCT__
1079 #define __FUNCT__ "DMPlexDistributeLabels"
1080 /* Here we are assuming that process 0 always has everything */
1081 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1082 {
1083   DM_Plex         *mesh = (DM_Plex*) dm->data;
1084   MPI_Comm         comm;
1085   DMLabel          depth;
1086   PetscMPIInt      rank;
1087   PetscInt         numLabels, numLocalLabels, l;
1088   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1089   PetscObjectState depthState = -1;
1090   PetscErrorCode   ierr;
1091 
1092   PetscFunctionBegin;
1093   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1094   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1095   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1096   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1097   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1098 
1099   /* If the user has changed the depth label, communicate it instead */
1100   ierr = DMPlexGetDepthLabel(dm, &depth);CHKERRQ(ierr);
1101   if (depth) {ierr = DMLabelGetState(depth, &depthState);CHKERRQ(ierr);}
1102   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1103   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1104   if (sendDepth) {
1105     ierr = DMPlexRemoveLabel(dmParallel, "depth", &depth);CHKERRQ(ierr);
1106     ierr = DMLabelDestroy(&depth);CHKERRQ(ierr);
1107   }
1108   /* Everyone must have either the same number of labels, or none */
1109   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1110   numLabels = numLocalLabels;
1111   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1112   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1113   for (l = numLabels-1; l >= 0; --l) {
1114     DMLabel     label = NULL, labelNew = NULL;
1115     PetscBool   isdepth;
1116 
1117     if (hasLabels) {
1118       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1119       /* Skip "depth" because it is recreated */
1120       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1121     }
1122     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1123     if (isdepth && !sendDepth) continue;
1124     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1125     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1126   }
1127   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1128   PetscFunctionReturn(0);
1129 }
1130 
1131 #undef __FUNCT__
1132 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure: copy the per-dimension hybrid bounds and take
     rank 0's values (presumably rank 0 held the full mesh before
     distribution -- same assumption the label distribution makes) */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    /* pmax < 0 means this dimension has no hybrid points */
    if (pmax < 0) continue;
    /* Original stratum bounds (clamped to the actual depth), taken from rank 0 */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points that were hybrid points (at or past the old bound) in
       the original global numbering; hybrid points sit at the end of a stratum,
       so the new bound is the stratum end minus that count */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1173 
1174 #undef __FUNCT__
1175 #define __FUNCT__ "DMPlexDistributeSetupTree"
1176 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1177 {
1178   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1179   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1180   MPI_Comm        comm;
1181   DM              refTree;
1182   PetscSection    origParentSection, newParentSection;
1183   PetscInt        *origParents, *origChildIDs;
1184   PetscBool       flg;
1185   PetscErrorCode  ierr;
1186 
1187   PetscFunctionBegin;
1188   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1189   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1190   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1191 
1192   /* Set up tree */
1193   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1194   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1195   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1196   if (origParentSection) {
1197     PetscInt        pStart, pEnd;
1198     PetscInt        *newParents, *newChildIDs, *globParents;
1199     PetscInt        *remoteOffsetsParents, newParentSize;
1200     PetscSF         parentSF;
1201 
1202     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1203     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1204     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1205     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1206     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1207     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1208     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1209     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1210     if (original) {
1211       PetscInt numParents;
1212 
1213       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1214       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1215       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1216     }
1217     else {
1218       globParents = origParents;
1219     }
1220     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1221     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1222     if (original) {
1223       ierr = PetscFree(globParents);CHKERRQ(ierr);
1224     }
1225     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1226     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1227     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1228 #if PETSC_USE_DEBUG
1229     {
1230       PetscInt  p;
1231       PetscBool valid = PETSC_TRUE;
1232       for (p = 0; p < newParentSize; ++p) {
1233         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1234       }
1235       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1236     }
1237 #endif
1238     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1239     if (flg) {
1240       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1241       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1242       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1243       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1244       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1245     }
1246     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1247     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1248     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1249     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1250   }
1251   pmesh->useAnchors = mesh->useAnchors;
1252   PetscFunctionReturn(0);
1253 }
1254 
1255 #undef __FUNCT__
1256 #define __FUNCT__ "DMPlexDistributeSF"
1257 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1258 {
1259   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1260   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1261   PetscMPIInt            rank, numProcs;
1262   MPI_Comm               comm;
1263   PetscErrorCode         ierr;
1264 
1265   PetscFunctionBegin;
1266   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1267   PetscValidPointer(dmParallel,7);
1268 
1269   /* Create point SF for parallel mesh */
1270   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1271   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1272   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1273   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1274   {
1275     const PetscInt *leaves;
1276     PetscSFNode    *remotePoints, *rowners, *lowners;
1277     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1278     PetscInt        pStart, pEnd;
1279 
1280     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1281     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1282     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1283     for (p=0; p<numRoots; p++) {
1284       rowners[p].rank  = -1;
1285       rowners[p].index = -1;
1286     }
1287     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1288     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1289     for (p = 0; p < numLeaves; ++p) {
1290       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1291         lowners[p].rank  = rank;
1292         lowners[p].index = leaves ? leaves[p] : p;
1293       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1294         lowners[p].rank  = -2;
1295         lowners[p].index = -2;
1296       }
1297     }
1298     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1299       rowners[p].rank  = -3;
1300       rowners[p].index = -3;
1301     }
1302     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1303     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1304     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1305     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1306     for (p = 0; p < numLeaves; ++p) {
1307       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1308       if (lowners[p].rank != rank) ++numGhostPoints;
1309     }
1310     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1311     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1312     for (p = 0, gp = 0; p < numLeaves; ++p) {
1313       if (lowners[p].rank != rank) {
1314         ghostPoints[gp]        = leaves ? leaves[p] : p;
1315         remotePoints[gp].rank  = lowners[p].rank;
1316         remotePoints[gp].index = lowners[p].index;
1317         ++gp;
1318       }
1319     }
1320     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1321     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1322     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1323   }
1324   pmesh->useCone    = mesh->useCone;
1325   pmesh->useClosure = mesh->useClosure;
1326   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1327   PetscFunctionReturn(0);
1328 }
1329 
1330 #undef __FUNCT__
1331 #define __FUNCT__ "DMPlexCreatePointSF"
1332 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1334 
1335   Input Parameter:
1336 + dm          - The source DMPlex object
1337 . migrationSF - The star forest that describes the parallel point remapping
1338 . ownership   - Flag causing a vote to determine point ownership
1339 
1340   Output Parameter:
1341 - pointSF     - The star forest describing the point overlap in the remapped DM
1342 
1343   Level: developer
1344 
1345 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1346 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: each root keeps its current rank as owner. NOTE(review): the
       index is only filled in for roots whose leaf stayed on this rank, so this
       branch presumably expects ownership to be unchanged by the migration --
       confirm against callers */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves[p];
      }
    }
  }
  /* Tell every leaf who owns its point and where it lives on the owner */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves owned elsewhere become the leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1408 
1409 #undef __FUNCT__
1410 #define __FUNCT__ "DMPlexMigrate"
1411 /*@C
1412   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1413 
1414   Input Parameters:
1415 + dm       - The source DMPlex object
1416 - sf       - The star forest communication context describing the migration pattern
1417 
1418   Output Parameter:
1419 . targetDM - The target DMPlex object
1420 
1421   Level: intermediate
1422 
1423 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1424 @*/
1425 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1426 {
1427   MPI_Comm               comm;
1428   PetscInt               dim, nroots;
1429   PetscSF                sfPoint;
1430   ISLocalToGlobalMapping ltogMigration;
1431   ISLocalToGlobalMapping ltogOriginal = NULL;
1432   PetscBool              flg;
1433   PetscErrorCode         ierr;
1434 
1435   PetscFunctionBegin;
1436   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1437   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1438   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1439   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1440   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1441 
1442   /* Check for a one-to-all distribution pattern */
1443   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1444   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1445   if (nroots >= 0) {
1446     IS                     isOriginal;
1447     PetscInt               n, size, nleaves;
1448     PetscInt              *numbering_orig, *numbering_new;
1449     /* Get the original point numbering */
1450     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1451     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1452     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1453     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1454     /* Convert to positive global numbers */
1455     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1456     /* Derive the new local-to-global mapping from the old one */
1457     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1458     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1459     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1460     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1461     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1462     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1463     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1464   } else {
1465     /* One-to-all distribution pattern: We can derive LToG from SF */
1466     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1467   }
1468   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1469   if (flg) {
1470     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1471     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1472   }
1473   /* Migrate DM data to target DM */
1474   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1475   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1476   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1477   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1478   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1479   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1480   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1481   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1482   PetscFunctionReturn(0);
1483 }
1484 
1485 #undef __FUNCT__
1486 #define __FUNCT__ "DMPlexDistribute"
1487 /*@C
1488   DMPlexDistribute - Distributes the mesh and any associated sections.
1489 
1490   Collective on dm
1491 
1492   Input Parameters:
1493 + dm  - The original DMPlex object
1494 - overlap - The overlap of partitions, 0 is the default
1495 
1496   Output Parameters:
1497 + sf - The PetscSF used for point distribution
1498 - dmParallel - The distributed DMPlex object, or NULL
1499 
1500   Note: If the mesh was not distributed, the return value is NULL.
1501 
1502   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1503   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1504   representation on the mesh.
1505 
1506   Level: intermediate
1507 
1508 .keywords: mesh, elements
1509 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1510 @*/
1511 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1512 {
1513   MPI_Comm               comm;
1514   PetscPartitioner       partitioner;
1515   IS                     cellPart;
1516   PetscSection           cellPartSection;
1517   DM                     dmCoord;
1518   DMLabel                lblPartition, lblMigration;
1519   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1520   PetscBool              flg;
1521   PetscMPIInt            rank, numProcs, p;
1522   PetscErrorCode         ierr;
1523 
1524   PetscFunctionBegin;
1525   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1526   if (sf) PetscValidPointer(sf,4);
1527   PetscValidPointer(dmParallel,5);
1528 
1529   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1530   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1531   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1532   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1533 
1534   *dmParallel = NULL;
1535   if (numProcs == 1) PetscFunctionReturn(0);
1536 
1537   /* Create cell partition */
1538   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1539   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1540   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1541   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1542   {
1543     /* Convert partition to DMLabel */
1544     PetscInt proc, pStart, pEnd, npoints, poffset;
1545     const PetscInt *points;
1546     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1547     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1548     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1549     for (proc = pStart; proc < pEnd; proc++) {
1550       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1551       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1552       for (p = poffset; p < poffset+npoints; p++) {
1553         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1554       }
1555     }
1556     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1557   }
1558   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1559   {
1560     /* Build a global process SF */
1561     PetscSFNode *remoteProc;
1562     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1563     for (p = 0; p < numProcs; ++p) {
1564       remoteProc[p].rank  = p;
1565       remoteProc[p].index = rank;
1566     }
1567     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1568     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1569     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1570   }
1571   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1572   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1573   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1574   /* Stratify the SF in case we are migrating an already parallel plex */
1575   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1576   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1577   sfMigration = sfStratified;
1578   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1579   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1580   if (flg) {
1581     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1582     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1583   }
1584 
1585   /* Create non-overlapping parallel DM and migrate internal data */
1586   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1587   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1588   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1589 
1590   /* Build the point SF without overlap */
1591   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1592   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1593   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1594   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1595   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1596 
1597   if (overlap > 0) {
1598     DM                 dmOverlap;
1599     PetscInt           nroots, nleaves;
1600     PetscSFNode       *newRemote;
1601     const PetscSFNode *oldRemote;
1602     PetscSF            sfOverlap, sfOverlapPoint;
1603     /* Add the partition overlap to the distributed DM */
1604     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1605     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1606     *dmParallel = dmOverlap;
1607     if (flg) {
1608       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1609       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1610     }
1611 
1612     /* Re-map the migration SF to establish the full migration pattern */
1613     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1614     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1615     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1616     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1617     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1618     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1619     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1620     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1621     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1622     sfMigration = sfOverlapPoint;
1623   }
1624   /* Cleanup Partition */
1625   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1626   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1627   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1628   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1629   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1630   /* Copy BC */
1631   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1632   /* Create sfNatural */
1633   if (dm->useNatural) {
1634     PetscSection section;
1635 
1636     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1637     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1638   }
1639   /* Cleanup */
1640   if (sf) {*sf = sfMigration;}
1641   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1642   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1643   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 #undef __FUNCT__
1648 #define __FUNCT__ "DMPlexDistributeOverlap"
1649 /*@C
1650   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1651 
1652   Collective on dm
1653 
1654   Input Parameters:
1655 + dm  - The non-overlapping distributed DMPlex object
1656 - overlap - The overlap of partitions, 0 is the default
1657 
1658   Output Parameters:
1659 + sf - The PetscSF used for point distribution
1660 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1661 
1662   Note: If the mesh was not distributed, the return value is NULL.
1663 
1664   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1665   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1666   representation on the mesh.
1667 
1668   Level: intermediate
1669 
1670 .keywords: mesh, elements
1671 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1672 @*/
1673 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1674 {
1675   MPI_Comm               comm;
1676   PetscMPIInt            rank;
1677   PetscSection           rootSection, leafSection;
1678   IS                     rootrank, leafrank;
1679   DM                     dmCoord;
1680   DMLabel                lblOverlap;
1681   PetscSF                sfOverlap, sfStratified, sfPoint;
1682   PetscErrorCode         ierr;
1683 
1684   PetscFunctionBegin;
1685   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1686   if (sf) PetscValidPointer(sf, 3);
1687   PetscValidPointer(dmOverlap, 4);
1688 
1689   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1690   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1691   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1692 
1693   /* Compute point overlap with neighbouring processes on the distributed DM */
1694   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1695   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1696   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1697   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1698   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1699   /* Convert overlap label to stratified migration SF */
1700   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1701   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1702   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1703   sfOverlap = sfStratified;
1704   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1705   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1706 
1707   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1708   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1709   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1710   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1711   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1712 
1713   /* Build the overlapping DM */
1714   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1715   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1716   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1717   /* Build the new point SF */
1718   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1719   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1720   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1721   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1722   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1723   /* Cleanup overlap partition */
1724   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1725   if (sf) *sf = sfOverlap;
1726   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1727   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1728   PetscFunctionReturn(0);
1729 }
1730