xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 5789d1f5dcd14af9aabfc1f6790ca7b6d7aa2307)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
/*@C
  DMPlexGetAdjacencyUser - get the user-defined adjacency callback

  Input Parameter:
. dm      - The DM object

  Output Parameters:
+ user    - The user callback
- ctx     - context for callback evaluation

  Level: advanced

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Either output may be NULL when the caller does not need it */
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}
56 
57 /*@
58   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useCone - Flag to use the cone first
63 
64   Level: intermediate
65 
66   Notes:
67 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
68 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
69 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
70 
71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
72 @*/
73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
74 {
75   DM_Plex *mesh = (DM_Plex *) dm->data;
76 
77   PetscFunctionBegin;
78   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
79   mesh->useCone = useCone;
80   PetscFunctionReturn(0);
81 }
82 
83 /*@
84   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
85 
86   Input Parameter:
87 . dm      - The DM object
88 
89   Output Parameter:
90 . useCone - Flag to use the cone first
91 
92   Level: intermediate
93 
94   Notes:
95 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
96 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
97 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
98 
99 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
100 @*/
101 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
102 {
103   DM_Plex *mesh = (DM_Plex *) dm->data;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
107   PetscValidIntPointer(useCone, 2);
108   *useCone = mesh->useCone;
109   PetscFunctionReturn(0);
110 }
111 
112 /*@
113   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
114 
115   Input Parameters:
116 + dm      - The DM object
117 - useClosure - Flag to use the closure
118 
119   Level: intermediate
120 
121   Notes:
122 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
123 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
124 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
125 
126 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
127 @*/
128 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
129 {
130   DM_Plex *mesh = (DM_Plex *) dm->data;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
134   mesh->useClosure = useClosure;
135   PetscFunctionReturn(0);
136 }
137 
138 /*@
139   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
140 
141   Input Parameter:
142 . dm      - The DM object
143 
144   Output Parameter:
145 . useClosure - Flag to use the closure
146 
147   Level: intermediate
148 
149   Notes:
150 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
151 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
152 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
153 
154 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
155 @*/
156 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
157 {
158   DM_Plex *mesh = (DM_Plex *) dm->data;
159 
160   PetscFunctionBegin;
161   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
162   PetscValidIntPointer(useClosure, 2);
163   *useClosure = mesh->useClosure;
164   PetscFunctionReturn(0);
165 }
166 
167 /*@
168   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
169 
170   Input Parameters:
171 + dm      - The DM object
172 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
173 
174   Level: intermediate
175 
176 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
177 @*/
178 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
179 {
180   DM_Plex *mesh = (DM_Plex *) dm->data;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
184   mesh->useAnchors = useAnchors;
185   PetscFunctionReturn(0);
186 }
187 
188 /*@
189   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
190 
191   Input Parameter:
192 . dm      - The DM object
193 
194   Output Parameter:
195 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
196 
197   Level: intermediate
198 
199 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
200 @*/
201 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
202 {
203   DM_Plex *mesh = (DM_Plex *) dm->data;
204 
205   PetscFunctionBegin;
206   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
207   PetscValidIntPointer(useAnchors, 2);
208   *useAnchors = mesh->useAnchors;
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
213 {
214   const PetscInt *cone = NULL;
215   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
216   PetscErrorCode  ierr;
217 
218   PetscFunctionBeginHot;
219   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
220   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
221   for (c = 0; c <= coneSize; ++c) {
222     const PetscInt  point   = !c ? p : cone[c-1];
223     const PetscInt *support = NULL;
224     PetscInt        supportSize, s, q;
225 
226     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
227     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
228     for (s = 0; s < supportSize; ++s) {
229       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
230         if (support[s] == adj[q]) break;
231       }
232       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
233     }
234   }
235   *adjSize = numAdj;
236   PetscFunctionReturn(0);
237 }
238 
239 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
240 {
241   const PetscInt *support = NULL;
242   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
243   PetscErrorCode  ierr;
244 
245   PetscFunctionBeginHot;
246   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
247   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
248   for (s = 0; s <= supportSize; ++s) {
249     const PetscInt  point = !s ? p : support[s-1];
250     const PetscInt *cone  = NULL;
251     PetscInt        coneSize, c, q;
252 
253     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
254     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
255     for (c = 0; c < coneSize; ++c) {
256       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
257         if (cone[c] == adj[q]) break;
258       }
259       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
260     }
261   }
262   *adjSize = numAdj;
263   PetscFunctionReturn(0);
264 }
265 
266 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
267 {
268   PetscInt      *star = NULL;
269   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
270   PetscErrorCode ierr;
271 
272   PetscFunctionBeginHot;
273   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
274   for (s = 0; s < starSize*2; s += 2) {
275     const PetscInt *closure = NULL;
276     PetscInt        closureSize, c, q;
277 
278     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
279     for (c = 0; c < closureSize*2; c += 2) {
280       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
281         if (closure[c] == adj[q]) break;
282       }
283       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
284     }
285     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
286   }
287   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
288   *adjSize = numAdj;
289   PetscFunctionReturn(0);
290 }
291 
/* Dispatch adjacency computation for point p according to the flags, allocating the
   output array on first use and optionally replacing constrained points by their anchors.

   Selection order: user callback (if set) > transitive closure > cone-first > support-first.

   On input, *adj may be NULL (an array is allocated; the caller must PetscFree it) and
   *adjSize may be negative (meaning "use the allocated capacity").

   NOTE(review): asiz is a function-scope static, but it is only recomputed when *adj is
   NULL. A caller passing a preallocated *adj with *adjSize < 0 gets whatever asiz was
   left over from an earlier call (possibly 0 or for a different DM) — confirm this is
   never done by callers. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;            /* cached upper bound on adjacency size (see NOTE above) */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* Anchor information: aSec maps constrained points to counts/offsets into aIS */
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Bound the adjacency size by the worst-case geometric series of cone/support
       fan-outs over the DAG depth, scaled by the anchor fan-out, and capped by the
       total number of points in the chart */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback overrides all built-in adjacency determination */
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points:
       remove the point by shifting the tail left, then append its anchors
       (deduplicated). i is only advanced when no replacement happened, so the
       shifted-in point is re-examined. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by compacting the array */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor if not already present */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
377 
378 /*@
379   DMPlexGetAdjacency - Return all points adjacent to the given point
380 
381   Input Parameters:
382 + dm - The DM object
383 . p  - The point
384 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
385 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
386 
387   Output Parameters:
388 + adjSize - The number of adjacent points
389 - adj - The adjacent points
390 
391   Level: advanced
392 
393   Notes: The user must PetscFree the adj array if it was not passed in.
394 
395 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
396 @*/
397 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
398 {
399   DM_Plex       *mesh = (DM_Plex *) dm->data;
400   PetscErrorCode ierr;
401 
402   PetscFunctionBeginHot;
403   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
404   PetscValidPointer(adjSize,3);
405   PetscValidPointer(adj,4);
406   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
407   PetscFunctionReturn(0);
408 }
409 
410 /*@
411   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
412 
413   Collective on DM
414 
415   Input Parameters:
416 + dm      - The DM
417 - sfPoint - The PetscSF which encodes point connectivity
418 
419   Output Parameters:
420 + processRanks - A list of process neighbors, or NULL
421 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
422 
423   Level: developer
424 
425 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
426 @*/
427 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
428 {
429   const PetscSFNode *remotePoints;
430   PetscInt          *localPointsNew;
431   PetscSFNode       *remotePointsNew;
432   const PetscInt    *nranks;
433   PetscInt          *ranksNew;
434   PetscBT            neighbors;
435   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
436   PetscMPIInt        size, proc, rank;
437   PetscErrorCode     ierr;
438 
439   PetscFunctionBegin;
440   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
441   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
442   if (processRanks) {PetscValidPointer(processRanks, 3);}
443   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
444   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
445   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
446   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
447   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
448   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
449   /* Compute root-to-leaf process connectivity */
450   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
451   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
452   for (p = pStart; p < pEnd; ++p) {
453     PetscInt ndof, noff, n;
454 
455     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
456     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
457     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
458   }
459   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
460   /* Compute leaf-to-neighbor process connectivity */
461   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
462   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
463   for (p = pStart; p < pEnd; ++p) {
464     PetscInt ndof, noff, n;
465 
466     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
467     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
468     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
469   }
470   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
471   /* Compute leaf-to-root process connectivity */
472   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
473   /* Calculate edges */
474   PetscBTClear(neighbors, rank);
475   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
476   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
477   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
478   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
479   for(proc = 0, n = 0; proc < size; ++proc) {
480     if (PetscBTLookup(neighbors, proc)) {
481       ranksNew[n]              = proc;
482       localPointsNew[n]        = proc;
483       remotePointsNew[n].index = rank;
484       remotePointsNew[n].rank  = proc;
485       ++n;
486     }
487   }
488   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
489   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
490   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
491   if (sfProcess) {
492     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
493     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
494     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
495     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
496   }
497   PetscFunctionReturn(0);
498 }
499 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends its own rank, the root
     receives one entry per attached leaf (rootSection gives the layout) */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
554 
/*@C
  DMPlexCreateOverlap - Compute the points this process should receive from other processes to form
  the requested overlap, returned as a DMLabel of point/rank pairings

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point. Mark everything adjacent to a leaf
     as a candidate donation to the root's owner rank */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  /* adj was allocated by the first DMPlexGetAdjacency() call and reused throughout */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF: every process is a leaf of every other process */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
683 
684 /*@C
685   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
686 
687   Collective on DM
688 
689   Input Parameters:
690 + dm          - The DM
691 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
692 
693   Output Parameters:
694 + migrationSF - An SF that maps original points in old locations to points in new locations
695 
696   Level: developer
697 
698 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
699 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag each local point with its depth, then pull the depth of every incoming overlap point */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* depthShift[d] is the amount by which existing points of depth d move so that received
     points from other strata can be inserted; cells (d == dim) stay at their old offsets.
     NOTE(review): this assumes the interleaved Plex numbering (cells, vertices, faces, ...) */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  /* depthIdx[d] tracks the next free slot in stratum d, starting past the shifted local points */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local points plus the received overlap points */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, overwriting the self-mapping above */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the end of their respective strata */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  /* ilocal/iremote ownership passes to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
794 
795 /*@
796   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
797 
  Input Parameters:
799 + dm          - The DM
800 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
801 
802   Output Parameter:
803 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
804 
805   Level: developer
806 
807 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
808 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Depth must agree across ranks that hold mesh points (ldepth may be -1 on empty ranks) */
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  /* Encode depth as 2*d+1 for regular points and 2*d for hybrid points, so each
     (depth, hybrid) combination gets its own stratum in the counting below */
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  /* depthShift[dep] is the starting offset of encoded stratum dep in the new local numbering.
     The ordering places regular cells first, then hybrid cells, then vertices, then the
     remaining strata from high to low encoded depth */
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  /* ilocal ownership passes to the SF; iremote is copied since it still belongs to the input sf */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
877 
878 /*@
879   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
880 
881   Collective on DM
882 
883   Input Parameters:
884 + dm - The DMPlex object
885 . pointSF - The PetscSF describing the communication pattern
886 . originalSection - The PetscSection for existing data layout
887 - originalVec - The existing data
888 
889   Output Parameters:
+ newSection - The PetscSection describing the new data layout
891 - newVec - The new data
892 
893   Level: developer
894 
895 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
896 @*/
897 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
898 {
899   PetscSF        fieldSF;
900   PetscInt      *remoteOffsets, fieldSize;
901   PetscScalar   *originalValues, *newValues;
902   PetscErrorCode ierr;
903 
904   PetscFunctionBegin;
905   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
906   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
907 
908   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
909   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
910   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
911 
912   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
913   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
914   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
915   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
916   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
917   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
918   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
919   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
920   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
921   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 /*@
926   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
927 
928   Collective on DM
929 
930   Input Parameters:
931 + dm - The DMPlex object
932 . pointSF - The PetscSF describing the communication pattern
933 . originalSection - The PetscSection for existing data layout
934 - originalIS - The existing data
935 
936   Output Parameters:
+ newSection - The PetscSection describing the new data layout
938 - newIS - The new data
939 
940   Level: developer
941 
942 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
943 @*/
944 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
945 {
946   PetscSF         fieldSF;
947   PetscInt       *newValues, *remoteOffsets, fieldSize;
948   const PetscInt *originalValues;
949   PetscErrorCode  ierr;
950 
951   PetscFunctionBegin;
952   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
953   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
954 
955   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
956   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
957 
958   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
959   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
960   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
961   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
962   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
963   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
964   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
965   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
966   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
969 
970 /*@
971   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
972 
973   Collective on DM
974 
975   Input Parameters:
976 + dm - The DMPlex object
977 . pointSF - The PetscSF describing the communication pattern
978 . originalSection - The PetscSection for existing data layout
979 . datatype - The type of data
980 - originalData - The existing data
981 
982   Output Parameters:
983 + newSection - The PetscSection describing the new data layout
984 - newData - The new data
985 
986   Level: developer
987 
988 .seealso: DMPlexDistribute(), DMPlexDistributeField()
989 @*/
990 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
991 {
992   PetscSF        fieldSF;
993   PetscInt      *remoteOffsets, fieldSize;
994   PetscMPIInt    dataSize;
995   PetscErrorCode ierr;
996 
997   PetscFunctionBegin;
998   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
999   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
1000 
1001   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
1002   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
1003   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
1004 
1005   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
1006   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1007   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1008   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1009   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1010   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1011   PetscFunctionReturn(0);
1012 }
1013 
1014 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1015 {
1016   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1017   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1018   MPI_Comm               comm;
1019   PetscSF                coneSF;
1020   PetscSection           originalConeSection, newConeSection;
1021   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1022   PetscBool              flg;
1023   PetscErrorCode         ierr;
1024 
1025   PetscFunctionBegin;
1026   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1027   PetscValidPointer(dmParallel,4);
1028   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1029 
1030   /* Distribute cone section */
1031   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1032   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1033   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1034   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1035   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1036   {
1037     PetscInt pStart, pEnd, p;
1038 
1039     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1040     for (p = pStart; p < pEnd; ++p) {
1041       PetscInt coneSize;
1042       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1043       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1044     }
1045   }
1046   /* Communicate and renumber cones */
1047   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1048   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1049   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1050   if (original) {
1051     PetscInt numCones;
1052 
1053     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
1054     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
1055   }
1056   else {
1057     globCones = cones;
1058   }
1059   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1060   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1061   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1062   if (original) {
1063     ierr = PetscFree(globCones);CHKERRQ(ierr);
1064   }
1065   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1066   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1067 #if PETSC_USE_DEBUG
1068   {
1069     PetscInt  p;
1070     PetscBool valid = PETSC_TRUE;
1071     for (p = 0; p < newConesSize; ++p) {
1072       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1073     }
1074     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1075   }
1076 #endif
1077   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1078   if (flg) {
1079     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1080     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1081     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1082     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1083     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1084   }
1085   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1086   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1087   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1088   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1089   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1090   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1091   /* Create supports and stratify DMPlex */
1092   {
1093     PetscInt pStart, pEnd;
1094 
1095     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1096     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1097   }
1098   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1099   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1100   pmesh->useCone    = mesh->useCone;
1101   pmesh->useClosure = mesh->useClosure;
1102   PetscFunctionReturn(0);
1103 }
1104 
1105 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1106 {
1107   MPI_Comm         comm;
1108   PetscSection     originalCoordSection, newCoordSection;
1109   Vec              originalCoordinates, newCoordinates;
1110   PetscInt         bs;
1111   PetscBool        isper;
1112   const char      *name;
1113   const PetscReal *maxCell, *L;
1114   const DMBoundaryType *bd;
1115   PetscErrorCode   ierr;
1116 
1117   PetscFunctionBegin;
1118   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1119   PetscValidPointer(dmParallel, 3);
1120 
1121   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1122   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1123   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1124   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1125   if (originalCoordinates) {
1126     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1127     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1128     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1129 
1130     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1131     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1132     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1133     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1134     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1135   }
1136   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1137   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 /* Here we are assuming that process 0 always has everything */
1142 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1143 {
1144   DM_Plex         *mesh = (DM_Plex*) dm->data;
1145   MPI_Comm         comm;
1146   DMLabel          depthLabel;
1147   PetscMPIInt      rank;
1148   PetscInt         depth, d, numLabels, numLocalLabels, l;
1149   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1150   PetscObjectState depthState = -1;
1151   PetscErrorCode   ierr;
1152 
1153   PetscFunctionBegin;
1154   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1155   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1156   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1157   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1158   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1159 
1160   /* If the user has changed the depth label, communicate it instead */
1161   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1162   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1163   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1164   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1165   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1166   if (sendDepth) {
1167     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1168     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1169   }
1170   /* Everyone must have either the same number of labels, or none */
1171   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1172   numLabels = numLocalLabels;
1173   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1174   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1175   for (l = numLabels-1; l >= 0; --l) {
1176     DMLabel     label = NULL, labelNew = NULL;
1177     PetscBool   isdepth;
1178 
1179     if (hasLabels) {
1180       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1181       /* Skip "depth" because it is recreated */
1182       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1183     }
1184     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1185     if (isdepth && !sendDepth) continue;
1186     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1187     if (isdepth) {
1188       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1189       PetscInt gdepth;
1190 
1191       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1192       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1193       for (d = 0; d <= gdepth; ++d) {
1194         PetscBool has;
1195 
1196         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1197         if (!has) ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);
1198       }
1199     }
1200     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1201   }
1202   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1203   PetscFunctionReturn(0);
1204 }
1205 
/* Propagate the hybrid-point bounds of dm onto dmParallel: each local point is flagged
   as hybrid or not, the flags are pushed through the migration SF, and the first hybrid
   point found in each stratum of dmParallel becomes its new hybridPointMax.
   NOTE(review): the renumbering argument is currently unused here. */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  /* Flag arrays over both charts, zero-initialized (PETSC_FALSE) */
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    /* For interpolated depth==1 meshes, stratum 1 holds cells, so use the dim entry */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      /* Points at or beyond hybridMax within the stratum are hybrid */
      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  /* Push the hybrid flags along the migration SF to the new point numbering */
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    /* The first hybrid point in each new stratum marks the hybrid boundary */
    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1252 
1253 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1254 {
1255   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1256   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1257   MPI_Comm        comm;
1258   DM              refTree;
1259   PetscSection    origParentSection, newParentSection;
1260   PetscInt        *origParents, *origChildIDs;
1261   PetscBool       flg;
1262   PetscErrorCode  ierr;
1263 
1264   PetscFunctionBegin;
1265   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1266   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1267   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1268 
1269   /* Set up tree */
1270   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1271   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1272   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1273   if (origParentSection) {
1274     PetscInt        pStart, pEnd;
1275     PetscInt        *newParents, *newChildIDs, *globParents;
1276     PetscInt        *remoteOffsetsParents, newParentSize;
1277     PetscSF         parentSF;
1278 
1279     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1280     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1281     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1282     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1283     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1284     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1285     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1286     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1287     if (original) {
1288       PetscInt numParents;
1289 
1290       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1291       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1292       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1293     }
1294     else {
1295       globParents = origParents;
1296     }
1297     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1298     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1299     if (original) {
1300       ierr = PetscFree(globParents);CHKERRQ(ierr);
1301     }
1302     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1303     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1304     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1305 #if PETSC_USE_DEBUG
1306     {
1307       PetscInt  p;
1308       PetscBool valid = PETSC_TRUE;
1309       for (p = 0; p < newParentSize; ++p) {
1310         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1311       }
1312       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1313     }
1314 #endif
1315     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1316     if (flg) {
1317       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1318       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1319       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1320       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1321       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1322     }
1323     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1324     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1325     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1326     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1327   }
1328   pmesh->useAnchors = mesh->useAnchors;
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1333 {
1334   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1335   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1336   PetscMPIInt            rank, size;
1337   MPI_Comm               comm;
1338   PetscErrorCode         ierr;
1339 
1340   PetscFunctionBegin;
1341   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1342   PetscValidPointer(dmParallel,7);
1343 
1344   /* Create point SF for parallel mesh */
1345   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1346   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1347   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1348   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1349   {
1350     const PetscInt *leaves;
1351     PetscSFNode    *remotePoints, *rowners, *lowners;
1352     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1353     PetscInt        pStart, pEnd;
1354 
1355     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1356     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1357     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1358     for (p=0; p<numRoots; p++) {
1359       rowners[p].rank  = -1;
1360       rowners[p].index = -1;
1361     }
1362     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1363     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1364     for (p = 0; p < numLeaves; ++p) {
1365       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1366         lowners[p].rank  = rank;
1367         lowners[p].index = leaves ? leaves[p] : p;
1368       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1369         lowners[p].rank  = -2;
1370         lowners[p].index = -2;
1371       }
1372     }
1373     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1374       rowners[p].rank  = -3;
1375       rowners[p].index = -3;
1376     }
1377     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1378     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1379     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1380     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1381     for (p = 0; p < numLeaves; ++p) {
1382       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1383       if (lowners[p].rank != rank) ++numGhostPoints;
1384     }
1385     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1386     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1387     for (p = 0, gp = 0; p < numLeaves; ++p) {
1388       if (lowners[p].rank != rank) {
1389         ghostPoints[gp]        = leaves ? leaves[p] : p;
1390         remotePoints[gp].rank  = lowners[p].rank;
1391         remotePoints[gp].index = lowners[p].index;
1392         ++gp;
1393       }
1394     }
1395     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1396     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1397     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1398   }
1399   pmesh->useCone    = mesh->useCone;
1400   pmesh->useClosure = mesh->useClosure;
1401   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1402   PetscFunctionReturn(0);
1403 }
1404 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    /* MAXLOC over (rank, index) pairs: the highest bidding rank wins each root point */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: each root keeps its current rank as owner; index filled in below */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Inform every leaf of its owner's (rank, local index) */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves owned by another rank become ghost points of the new point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  /* pointLocal/pointRemote ownership transfers to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1481 
1482 /*@C
1483   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1484 
1485   Input Parameter:
1486 + dm       - The source DMPlex object
1487 . sf       - The star forest communication context describing the migration pattern
1488 
1489   Output Parameter:
1490 - targetDM - The target DMPlex object
1491 
1492   Level: intermediate
1493 
1494 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1495 @*/
1496 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1497 {
1498   MPI_Comm               comm;
1499   PetscInt               dim, nroots;
1500   PetscSF                sfPoint;
1501   ISLocalToGlobalMapping ltogMigration;
1502   ISLocalToGlobalMapping ltogOriginal = NULL;
1503   PetscBool              flg;
1504   PetscErrorCode         ierr;
1505 
1506   PetscFunctionBegin;
1507   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1508   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1509   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1510   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1511   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1512 
1513   /* Check for a one-to-all distribution pattern */
1514   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1515   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1516   if (nroots >= 0) {
1517     IS                     isOriginal;
1518     PetscInt               n, size, nleaves;
1519     PetscInt              *numbering_orig, *numbering_new;
1520     /* Get the original point numbering */
1521     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1522     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1523     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1524     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1525     /* Convert to positive global numbers */
1526     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1527     /* Derive the new local-to-global mapping from the old one */
1528     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1529     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1530     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1531     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1532     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1533     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1534     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1535   } else {
1536     /* One-to-all distribution pattern: We can derive LToG from SF */
1537     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1538   }
1539   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1540   if (flg) {
1541     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1542     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1543   }
1544   /* Migrate DM data to target DM */
1545   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1546   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1547   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1548   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1549   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1550   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1551   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1552   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1553   PetscFunctionReturn(0);
1554 }
1555 
1556 /*@C
1557   DMPlexDistribute - Distributes the mesh and any associated sections.
1558 
1559   Not Collective
1560 
1561   Input Parameter:
1562 + dm  - The original DMPlex object
1563 - overlap - The overlap of partitions, 0 is the default
1564 
1565   Output Parameter:
1566 + sf - The PetscSF used for point distribution
1567 - parallelMesh - The distributed DMPlex object, or NULL
1568 
1569   Note: If the mesh was not distributed, the return value is NULL.
1570 
1571   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1572   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1573   representation on the mesh.
1574 
1575   Level: intermediate
1576 
1577 .keywords: mesh, elements
1578 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1579 @*/
1580 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1581 {
1582   MPI_Comm               comm;
1583   PetscPartitioner       partitioner;
1584   IS                     cellPart;
1585   PetscSection           cellPartSection;
1586   DM                     dmCoord;
1587   DMLabel                lblPartition, lblMigration;
1588   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1589   PetscBool              flg;
1590   PetscMPIInt            rank, size, p;
1591   PetscErrorCode         ierr;
1592 
1593   PetscFunctionBegin;
1594   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1595   if (sf) PetscValidPointer(sf,4);
1596   PetscValidPointer(dmParallel,5);
1597 
1598   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1599   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1600   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1601   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1602 
1603   if (sf) *sf = NULL;
1604   *dmParallel = NULL;
1605   if (size == 1) {
1606     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1607     PetscFunctionReturn(0);
1608   }
1609 
1610   /* Create cell partition */
1611   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1612   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1613   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1614   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1615   {
1616     /* Convert partition to DMLabel */
1617     PetscInt proc, pStart, pEnd, npoints, poffset;
1618     const PetscInt *points;
1619     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1620     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1621     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1622     for (proc = pStart; proc < pEnd; proc++) {
1623       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1624       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1625       for (p = poffset; p < poffset+npoints; p++) {
1626         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1627       }
1628     }
1629     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1630   }
1631   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1632   {
1633     /* Build a global process SF */
1634     PetscSFNode *remoteProc;
1635     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1636     for (p = 0; p < size; ++p) {
1637       remoteProc[p].rank  = p;
1638       remoteProc[p].index = rank;
1639     }
1640     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1641     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1642     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1643   }
1644   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1645   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1646   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1647   /* Stratify the SF in case we are migrating an already parallel plex */
1648   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1649   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1650   sfMigration = sfStratified;
1651   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1652   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1653   if (flg) {
1654     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1655     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1656   }
1657 
1658   /* Create non-overlapping parallel DM and migrate internal data */
1659   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1660   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1661   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1662 
1663   /* Build the point SF without overlap */
1664   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1665   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1666   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1667   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1668   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1669 
1670   if (overlap > 0) {
1671     DM                 dmOverlap;
1672     PetscInt           nroots, nleaves;
1673     PetscSFNode       *newRemote;
1674     const PetscSFNode *oldRemote;
1675     PetscSF            sfOverlap, sfOverlapPoint;
1676     /* Add the partition overlap to the distributed DM */
1677     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1678     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1679     *dmParallel = dmOverlap;
1680     if (flg) {
1681       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1682       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1683     }
1684 
1685     /* Re-map the migration SF to establish the full migration pattern */
1686     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1687     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1688     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1689     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1690     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1691     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1692     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1693     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1694     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1695     sfMigration = sfOverlapPoint;
1696   }
1697   /* Cleanup Partition */
1698   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1699   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1700   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1701   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1702   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1703   /* Copy BC */
1704   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1705   /* Create sfNatural */
1706   if (dm->useNatural) {
1707     PetscSection section;
1708 
1709     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1710     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1711   }
1712   /* Cleanup */
1713   if (sf) {*sf = sfMigration;}
1714   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1715   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1716   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1717   PetscFunctionReturn(0);
1718 }
1719 
1720 /*@C
1721   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1722 
1723   Not Collective
1724 
1725   Input Parameter:
1726 + dm  - The non-overlapping distrbuted DMPlex object
1727 - overlap - The overlap of partitions, 0 is the default
1728 
1729   Output Parameter:
1730 + sf - The PetscSF used for point distribution
1731 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1732 
1733   Note: If the mesh was not distributed, the return value is NULL.
1734 
1735   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1736   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1737   representation on the mesh.
1738 
1739   Level: intermediate
1740 
1741 .keywords: mesh, elements
1742 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1743 @*/
1744 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1745 {
1746   MPI_Comm               comm;
1747   PetscMPIInt            size, rank;
1748   PetscSection           rootSection, leafSection;
1749   IS                     rootrank, leafrank;
1750   DM                     dmCoord;
1751   DMLabel                lblOverlap;
1752   PetscSF                sfOverlap, sfStratified, sfPoint;
1753   PetscErrorCode         ierr;
1754 
1755   PetscFunctionBegin;
1756   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1757   if (sf) PetscValidPointer(sf, 3);
1758   PetscValidPointer(dmOverlap, 4);
1759 
1760   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1761   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1762   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1763   if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
1764   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1765 
1766   /* Compute point overlap with neighbouring processes on the distributed DM */
1767   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1768   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1769   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1770   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1771   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1772   /* Convert overlap label to stratified migration SF */
1773   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1774   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1775   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1776   sfOverlap = sfStratified;
1777   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1778   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1779 
1780   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1781   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1782   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1783   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1784   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1785 
1786   /* Build the overlapping DM */
1787   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1788   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1789   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1790   /* Build the new point SF */
1791   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1792   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1793   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1794   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1795   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1796   /* Cleanup overlap partition */
1797   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1798   if (sf) *sf = sfOverlap;
1799   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1800   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1801   PetscFunctionReturn(0);
1802 }
1803 
1804 /*@C
1805   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1806   root process of the original's communicator.
1807 
1808   Input Parameters:
1809 . dm - the original DMPlex object
1810 
1811   Output Parameters:
1812 . gatherMesh - the gathered DM object, or NULL
1813 
1814   Level: intermediate
1815 
1816 .keywords: mesh
1817 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1818 @*/
1819 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1820 {
1821   MPI_Comm       comm;
1822   PetscMPIInt    size;
1823   PetscPartitioner oldPart, gatherPart;
1824   PetscErrorCode ierr;
1825 
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1828   comm = PetscObjectComm((PetscObject)dm);
1829   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1830   *gatherMesh = NULL;
1831   if (size == 1) PetscFunctionReturn(0);
1832   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1833   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1834   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1835   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1836   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1837   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1838   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1839   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1840   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1841   PetscFunctionReturn(0);
1842 }
1843 
1844 /*@C
1845   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1846 
1847   Input Parameters:
1848 . dm - the original DMPlex object
1849 
1850   Output Parameters:
1851 . redundantMesh - the redundant DM object, or NULL
1852 
1853   Level: intermediate
1854 
1855 .keywords: mesh
1856 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1857 @*/
1858 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1859 {
1860   MPI_Comm       comm;
1861   PetscMPIInt    size, rank;
1862   PetscInt       pStart, pEnd, p;
1863   PetscInt       numPoints = -1;
1864   PetscSF        migrationSF, sfPoint;
1865   DM             gatherDM, dmCoord;
1866   PetscSFNode    *points;
1867   PetscErrorCode ierr;
1868 
1869   PetscFunctionBegin;
1870   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1871   comm = PetscObjectComm((PetscObject)dm);
1872   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1873   *redundantMesh = NULL;
1874   if (size == 1) {
1875     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1876     *redundantMesh = dm;
1877     PetscFunctionReturn(0);
1878   }
1879   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1880   if (!gatherDM) PetscFunctionReturn(0);
1881   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1882   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1883   numPoints = pEnd - pStart;
1884   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1885   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1886   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1887   for (p = 0; p < numPoints; p++) {
1888     points[p].index = p;
1889     points[p].rank  = 0;
1890   }
1891   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1892   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1893   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1894   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1895   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1896   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1897   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1898   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1899   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1900   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1901   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1902   PetscFunctionReturn(0);
1903 }
1904