xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision ea844a1adba28e307898a6e94a1f422fd7d2db48)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
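/* A minimal usage sketch (the callback name MyAdjacency is hypothetical): the callback receives a
   preallocated adj[] array whose capacity is given by *adjSize on entry, and reports how many
   entries it wrote:

     static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
     {
       PetscFunctionBegin;
       adj[0]   = p;
       *adjSize = 1;
       PetscFunctionReturn(0);
     }

     ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr);
*/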
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
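      /* Append support[s] to adj[] if it is not already present: when q reaches numAdj without finding a
         match, the comma expression in the loop condition stores support[s] and ends the scan */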
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
182 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183 {
184   static PetscInt asiz = 0;
185   PetscInt maxAnchors = 1;
186   PetscInt aStart = -1, aEnd = -1;
187   PetscInt maxAdjSize;
188   PetscSection aSec = NULL;
189   IS aIS = NULL;
190   const PetscInt *anchors;
191   DM_Plex *mesh = (DM_Plex *)dm->data;
192   PetscErrorCode  ierr;
193 
194   PetscFunctionBeginHot;
195   if (useAnchors) {
196     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197     if (aSec) {
198       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199       maxAnchors = PetscMax(1,maxAnchors);
200       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202     }
203   }
204   if (!*adj) {
205     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206 
207     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
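    /* Conservative bound on the adjacency size: a transitive closure over depth levels contains at most
       a geometric-series number of points in maxC (cone sizes) or maxS (support sizes), so take the
       larger of the star-of-closure and closure-of-star estimates, allow for anchor replacement, and
       cap by the total number of points in the chart */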
210     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
211     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
212     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
213     asiz *= maxAnchors;
214     asiz  = PetscMin(asiz,pEnd-pStart);
215     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
216   }
217   if (*adjSize < 0) *adjSize = asiz;
218   maxAdjSize = *adjSize;
219   if (mesh->useradjacency) {
220     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
221   } else if (useTransitiveClosure) {
222     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
223   } else if (useCone) {
224     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
225   } else {
226     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227   }
228   if (useAnchors && aSec) {
229     PetscInt origSize = *adjSize;
230     PetscInt numAdj = origSize;
231     PetscInt i = 0, j;
232     PetscInt *orig = *adj;
233 
234     while (i < origSize) {
235       PetscInt p = orig[i];
236       PetscInt aDof = 0;
237 
238       if (p >= aStart && p < aEnd) {
239         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
240       }
241       if (aDof) {
242         PetscInt aOff;
243         PetscInt s, q;
244 
245         for (j = i + 1; j < numAdj; j++) {
246           orig[j - 1] = orig[j];
247         }
248         origSize--;
249         numAdj--;
250         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
251         for (s = 0; s < aDof; ++s) {
252           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
253             if (anchors[aOff+s] == orig[q]) break;
254           }
255           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
256         }
257       }
258       else {
259         i++;
260       }
261     }
262     *adjSize = numAdj;
263     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
264   }
265   PetscFunctionReturn(0);
266 }
267 
268 /*@
269   DMPlexGetAdjacency - Return all points adjacent to the given point
270 
271   Input Parameters:
272 + dm - The DM object
273 . p  - The point
274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276 
277   Output Parameters:
278 + adjSize - The number of adjacent points
279 - adj - The adjacent points
280 
281   Level: advanced
282 
283   Notes:
284     The user must PetscFree the adj array if it was not passed in.
285 
286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287 @*/
288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289 {
290   PetscBool      useCone, useClosure, useAnchors;
291   PetscErrorCode ierr;
292 
293   PetscFunctionBeginHot;
294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295   PetscValidPointer(adjSize,3);
296   PetscValidPointer(adj,4);
297   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300   PetscFunctionReturn(0);
301 }
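/* A minimal usage sketch: visit the adjacency of every point, letting DMPlexGetAdjacency() allocate
   adj on the first call, reusing it afterwards, and freeing it when done:

     PetscInt  pStart, pEnd, p;
     PetscInt *adj = NULL;

     ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
     for (p = pStart; p < pEnd; ++p) {
       PetscInt adjSize = PETSC_DETERMINE;

       ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
       ... use adj[0], ..., adj[adjSize-1] ...
     }
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/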
302 
303 /*@
304   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305 
306   Collective on dm
307 
308   Input Parameters:
309 + dm      - The DM
310 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330   PetscErrorCode     ierr;
331 
332   PetscFunctionBegin;
333   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335   if (processRanks) {PetscValidPointer(processRanks, 3);}
336   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
337   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342   /* Compute root-to-leaf process connectivity */
343   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345   for (p = pStart; p < pEnd; ++p) {
346     PetscInt ndof, noff, n;
347 
348     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351   }
352   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353   /* Compute leaf-to-neighbor process connectivity */
354   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356   for (p = pStart; p < pEnd; ++p) {
357     PetscInt ndof, noff, n;
358 
359     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362   }
363   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364   /* Compute leaf-to-root process connectivity */
365   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366   /* Calculate edges */
367   PetscBTClear(neighbors, rank);
368   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372   for(proc = 0, n = 0; proc < size; ++proc) {
373     if (PetscBTLookup(neighbors, proc)) {
374       ranksNew[n]              = proc;
375       localPointsNew[n]        = proc;
376       remotePointsNew[n].index = rank;
377       remotePointsNew[n].rank  = proc;
378       ++n;
379     }
380   }
381   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384   if (sfProcess) {
385     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389   }
390   PetscFunctionReturn(0);
391 }
392 
393 /*@
394   DMPlexDistributeOwnership - Compute owner information for shared points. This effectively makes the one-sided point SF information two-sided: roots learn the ranks of all their leaves, and that information is then distributed back to the leaves.
395 
396   Collective on dm
397 
398   Input Parameter:
399 . dm - The DM
400 
401   Output Parameters:
402 + rootSection - The number of leaves for a given root point
403 . rootrank    - The rank of each edge into the root point
404 . leafSection - The number of processes sharing a given leaf point
405 - leafrank    - The rank of each process sharing a leaf point
406 
407   Level: developer
408 
409 .seealso: DMPlexCreateOverlap()
410 @*/
411 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
412 {
413   MPI_Comm        comm;
414   PetscSF         sfPoint;
415   const PetscInt *rootdegree;
416   PetscInt       *myrank, *remoterank;
417   PetscInt        pStart, pEnd, p, nedges;
418   PetscMPIInt     rank;
419   PetscErrorCode  ierr;
420 
421   PetscFunctionBegin;
422   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
423   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
424   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
425   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
426   /* Compute number of leaves for each root */
427   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
428   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
429   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
430   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
431   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
432   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
433   /* Gather rank of each leaf to root */
434   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
435   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
436   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
437   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
438   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
439   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440   ierr = PetscFree(myrank);CHKERRQ(ierr);
441   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
442   /* Distribute remote ranks to leaves */
443   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
444   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
445   PetscFunctionReturn(0);
446 }
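/* A minimal sketch of the calling pattern (dm and comm assumed): the caller creates the two
   Sections, this routine fills them and creates the index sets, and the results feed directly into
   DMPlexCreateOverlap() below:

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     DMLabel      ovLabel;

     ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
     ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
     ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
     ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/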
447 
448 /*@C
449   DMPlexCreateOverlap - Compute the label marking, for each neighboring process, the points that must be sent to it in order to build an overlap of the given number of levels.
450 
451   Collective on dm
452 
453   Input Parameters:
454 + dm          - The DM
455 . levels      - Number of overlap levels
456 . rootSection - The number of leaves for a given root point
457 . rootrank    - The rank of each edge into the root point
458 . leafSection - The number of processes sharing a given leaf point
459 - leafrank    - The rank of each process sharing a leaf point
460 
461   Output Parameter:
462 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
463 
464   Level: developer
465 
466 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467 @*/
468 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469 {
470   MPI_Comm           comm;
471   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472   PetscSF            sfPoint;
473   const PetscSFNode *remote;
474   const PetscInt    *local;
475   const PetscInt    *nrank, *rrank;
476   PetscInt          *adj = NULL;
477   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478   PetscMPIInt        rank, size;
479   PetscBool          flg;
480   PetscErrorCode     ierr;
481 
482   PetscFunctionBegin;
483   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491   /* Handle leaves: shared with the root point */
492   for (l = 0; l < nleaves; ++l) {
493     PetscInt adjSize = PETSC_DETERMINE, a;
494 
495     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497   }
498   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500   /* Handle roots */
501   for (p = pStart; p < pEnd; ++p) {
502     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503 
504     if ((p >= sStart) && (p < sEnd)) {
505       /* Some leaves share a root with other leaves on different processes */
506       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
507       if (neighbors) {
508         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
509         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
510         for (n = 0; n < neighbors; ++n) {
511           const PetscInt remoteRank = nrank[noff+n];
512 
513           if (remoteRank == rank) continue;
514           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
515         }
516       }
517     }
518     /* Roots are shared with leaves */
519     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
520     if (!neighbors) continue;
521     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
522     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523     for (n = 0; n < neighbors; ++n) {
524       const PetscInt remoteRank = rrank[noff+n];
525 
526       if (remoteRank == rank) continue;
527       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528     }
529   }
530   ierr = PetscFree(adj);CHKERRQ(ierr);
531   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
532   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
533   /* Add additional overlap levels */
534   for (l = 1; l < levels; l++) {
535     /* Propagate point donations over SF to capture remote connections */
536     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
537     /* Add next level of point donations to the label */
538     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
539   }
540   /* We require the closure in the overlap */
541   ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
542   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
543   if (flg) {
544     PetscViewer viewer;
545     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
546     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
547   }
548   /* Invert sender to receiver label */
549   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
550   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
551   /* Add owned points, except for shared local points */
552   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
553   for (l = 0; l < nleaves; ++l) {
554     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
555     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
556   }
557   /* Clean up */
558   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
561 
562 /*@C
563   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
564 
565   Collective on dm
566 
567   Input Parameters:
568 + dm          - The DM
569 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
570 
571   Output Parameter:
572 . migrationSF - An SF that maps original points in old locations to points in new locations
573 
574   Level: developer
575 
576 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
577 @*/
578 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
579 {
580   MPI_Comm           comm;
581   PetscMPIInt        rank, size;
582   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
583   PetscInt          *pointDepths, *remoteDepths, *ilocal;
584   PetscInt          *depthRecv, *depthShift, *depthIdx;
585   PetscSFNode       *iremote;
586   PetscSF            pointSF;
587   const PetscInt    *sharedLocal;
588   const PetscSFNode *overlapRemote, *sharedRemote;
589   PetscErrorCode     ierr;
590 
591   PetscFunctionBegin;
592   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
593   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
594   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
595   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
596   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
597 
598   /* Before building the migration SF we need to know the new stratum offsets */
599   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
600   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
601   for (d=0; d<dim+1; d++) {
602     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
603     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
604   }
605   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
606   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
607   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
608 
609   /* Count received points in each stratum and compute the internal strata shift */
610   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
611   for (d=0; d<dim+1; d++) depthRecv[d]=0;
612   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
613   depthShift[dim] = 0;
614   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
615   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
616   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
617   for (d=0; d<dim+1; d++) {
618     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
619     depthIdx[d] = pStart + depthShift[d];
620   }
621 
622   /* From the overlap SF, build an SF that describes the full overlap migration */
623   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
624   newLeaves = pEnd - pStart + nleaves;
625   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
626   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
627   /* First map local points to themselves */
628   for (d=0; d<dim+1; d++) {
629     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
630     for (p=pStart; p<pEnd; p++) {
631       point = p + depthShift[d];
632       ilocal[point] = point;
633       iremote[point].index = p;
634       iremote[point].rank = rank;
635       depthIdx[d]++;
636     }
637   }
638 
639   /* Add in the remote roots for currently shared points */
640   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
641   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
642   for (d=0; d<dim+1; d++) {
643     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
644     for (p=0; p<numSharedPoints; p++) {
645       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
646         point = sharedLocal[p] + depthShift[d];
647         iremote[point].index = sharedRemote[p].index;
648         iremote[point].rank = sharedRemote[p].rank;
649       }
650     }
651   }
652 
653   /* Now add the incoming overlap points */
654   for (p=0; p<nleaves; p++) {
655     point = depthIdx[remoteDepths[p]];
656     ilocal[point] = point;
657     iremote[point].index = overlapRemote[p].index;
658     iremote[point].rank = overlapRemote[p].rank;
659     depthIdx[remoteDepths[p]]++;
660   }
661   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
662 
663   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
664   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
665   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
666   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
667   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
668 
669   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
670   PetscFunctionReturn(0);
671 }
672 
673 /*@
674   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
675 
676   Input Parameter:
677 + dm          - The DM
678 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
679 
680   Output Parameter:
681 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
682 
683   Level: developer
684 
685 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
686 @*/
687 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
688 {
689   MPI_Comm           comm;
690   PetscMPIInt        rank, size;
691   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
692   PetscInt          *pointDepths, *remoteDepths, *ilocal;
693   PetscInt          *depthRecv, *depthShift, *depthIdx;
694   PetscInt           hybEnd[4];
695   const PetscSFNode *iremote;
696   PetscErrorCode     ierr;
697 
698   PetscFunctionBegin;
699   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
700   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
701   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
702   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
703   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
704   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
705   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
706   ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
707 
708   /* Before building the migration SF we need to know the new stratum offsets */
709   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
710   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
711   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[PetscMax(depth-1,0)],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
712   for (d = 0; d < depth+1; ++d) {
713     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
714     for (p = pStart; p < pEnd; ++p) {
715       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
716         pointDepths[p] = 2 * d;
717       } else {
718         pointDepths[p] = 2 * d + 1;
719       }
720     }
721   }
722   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
723   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
724   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
725   /* Count received points in each stratum and compute the internal strata shift */
726   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
727   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
728   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
729   depthShift[2*depth+1] = 0;
730   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
731   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
732   depthShift[0] += depthRecv[1];
733   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
734   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
735   for (d = 2 * depth-1; d > 2; --d) {
736     PetscInt e;
737 
738     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
739   }
740   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
741   /* Derive a new local permutation based on stratified indices */
742   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
743   for (p = 0; p < nleaves; ++p) {
744     const PetscInt dep = remoteDepths[p];
745 
746     ilocal[p] = depthShift[dep] + depthIdx[dep];
747     depthIdx[dep]++;
748   }
749   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
750   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
751   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
752   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
753   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
754   ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
755   PetscFunctionReturn(0);
756 }
757 
758 /*@
759   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
760 
761   Collective on dm
762 
763   Input Parameters:
764 + dm - The DMPlex object
765 . pointSF - The PetscSF describing the communication pattern
766 . originalSection - The PetscSection for existing data layout
767 - originalVec - The existing data in a local vector
768 
769   Output Parameters:
770 + newSection - The PetscSection describing the new data layout
771 - newVec - The new data in a local vector
772 
773   Level: developer
774 
775 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
776 @*/
777 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
778 {
779   PetscSF        fieldSF;
780   PetscInt      *remoteOffsets, fieldSize;
781   PetscScalar   *originalValues, *newValues;
782   PetscErrorCode ierr;
783 
784   PetscFunctionBegin;
785   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
786   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
787 
788   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
789   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
790   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
791 
792   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
793   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
794   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
795   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
796   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
797   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
798   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
799   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
800   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
801   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
802   PetscFunctionReturn(0);
803 }
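/* A minimal usage sketch (migrationSF, originalSection, and originalVec assumed to exist): the
   caller creates the new Section and a local Vec, and this routine sizes, types, and fills the Vec,
   mirroring the coordinate distribution done later in this file:

     PetscSection newSection;
     Vec          newVec;

     ierr = PetscSectionCreate(PETSC_COMM_SELF, &newSection);CHKERRQ(ierr);
     ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
*/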
804 
805 /*@
806   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
807 
808   Collective on dm
809 
810   Input Parameters:
811 + dm - The DMPlex object
812 . pointSF - The PetscSF describing the communication pattern
813 . originalSection - The PetscSection for existing data layout
814 - originalIS - The existing data
815 
816   Output Parameters:
817 + newSection - The PetscSection describing the new data layout
818 - newIS - The new data
819 
820   Level: developer
821 
822 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
823 @*/
824 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
825 {
826   PetscSF         fieldSF;
827   PetscInt       *newValues, *remoteOffsets, fieldSize;
828   const PetscInt *originalValues;
829   PetscErrorCode  ierr;
830 
831   PetscFunctionBegin;
832   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
833   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
834 
835   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
836   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
837 
838   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
839   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
840   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
841   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
842   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
843   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
844   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
845   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
846   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 /*@
851   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
852 
853   Collective on dm
854 
855   Input Parameters:
856 + dm - The DMPlex object
857 . pointSF - The PetscSF describing the communication pattern
858 . originalSection - The PetscSection for existing data layout
859 . datatype - The type of data
860 - originalData - The existing data
861 
862   Output Parameters:
863 + newSection - The PetscSection describing the new data layout
864 - newData - The new data
865 
866   Level: developer
867 
868 .seealso: DMPlexDistribute(), DMPlexDistributeField()
869 @*/
870 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
871 {
872   PetscSF        fieldSF;
873   PetscInt      *remoteOffsets, fieldSize;
874   PetscMPIInt    dataSize;
875   PetscErrorCode ierr;
876 
877   PetscFunctionBegin;
878   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
879   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
880 
881   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
882   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
883   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
884 
885   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
886   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
887   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
888   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
889   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
890   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
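/* A minimal usage sketch (migrationSF, originalSection, and originalData assumed to exist):
   redistributing an integer array laid out by originalSection:

     PetscSection newSection;
     PetscInt    *newData;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeData(dm, migrationSF, originalSection, MPIU_INT, originalData, newSection, (void **) &newData);CHKERRQ(ierr);
*/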
893 
894 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
895 {
896   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
897   MPI_Comm               comm;
898   PetscSF                coneSF;
899   PetscSection           originalConeSection, newConeSection;
900   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
901   PetscBool              flg;
902   PetscErrorCode         ierr;
903 
904   PetscFunctionBegin;
905   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
906   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
907   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
908   /* Distribute cone section */
909   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
910   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
911   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
912   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
913   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
914   {
915     PetscInt pStart, pEnd, p;
916 
917     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
918     for (p = pStart; p < pEnd; ++p) {
919       PetscInt coneSize;
920       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
921       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
922     }
923   }
924   /* Communicate and renumber cones */
925   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
926   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
927   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
928   if (original) {
929     PetscInt numCones;
930 
931     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
932     ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
933     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
934   } else {
935     globCones = cones;
936   }
937   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
938   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
939   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
940   if (original) {
941     ierr = PetscFree(globCones);CHKERRQ(ierr);
942   }
943   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
944   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
945 #if defined(PETSC_USE_DEBUG)
946   {
947     PetscInt  p;
948     PetscBool valid = PETSC_TRUE;
949     for (p = 0; p < newConesSize; ++p) {
950       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
951     }
952     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
953   }
954 #endif
955   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
956   if (flg) {
957     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
958     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
959     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
960     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
961     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
962   }
963   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
964   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
965   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
966   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
967   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
968   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
969   /* Create supports and stratify DMPlex */
970   {
971     PetscInt pStart, pEnd;
972 
973     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
974     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
975   }
976   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
977   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
978   {
979     PetscBool useCone, useClosure, useAnchors;
980 
981     ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
982     ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
983     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
984     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
985   }
986   PetscFunctionReturn(0);
987 }
988 
989 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
990 {
991   MPI_Comm         comm;
992   PetscSection     originalCoordSection, newCoordSection;
993   Vec              originalCoordinates, newCoordinates;
994   PetscInt         bs;
995   PetscBool        isper;
996   const char      *name;
997   const PetscReal *maxCell, *L;
998   const DMBoundaryType *bd;
999   PetscErrorCode   ierr;
1000 
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1003   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1004 
1005   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1006   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1007   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1008   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1009   if (originalCoordinates) {
1010     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1011     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1012     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1013 
1014     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1015     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1016     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1017     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1018     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1019   }
1020   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1021   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1022   PetscFunctionReturn(0);
1023 }
1024 
1025 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1026 {
1027   DM_Plex         *mesh = (DM_Plex*) dm->data;
1028   MPI_Comm         comm;
1029   DMLabel          depthLabel;
1030   PetscMPIInt      rank;
1031   PetscInt         depth, d, numLabels, numLocalLabels, l;
1032   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1033   PetscObjectState depthState = -1;
1034   PetscErrorCode   ierr;
1035 
1036   PetscFunctionBegin;
1037   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1038   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1039 
1040   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1041   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1042   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1043 
1044   /* If the user has changed the depth label, communicate it instead */
1045   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1046   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1047   if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1048   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1049   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1050   if (sendDepth) {
1051     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1052     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1053   }
1054   /* Everyone must have either the same number of labels, or none */
1055   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1056   numLabels = numLocalLabels;
1057   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1058   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1059   for (l = numLabels-1; l >= 0; --l) {
1060     DMLabel     label = NULL, labelNew = NULL;
1061     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1062     const char *name = NULL;
1063 
1064     if (hasLabels) {
1065       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1066       /* Skip "depth" because it is recreated */
1067       ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1068       ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1069     }
1070     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1071     if (isDepth && !sendDepth) continue;
1072     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1073     if (isDepth) {
1074       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1075       PetscInt gdepth;
1076 
1077       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1078       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1079       for (d = 0; d <= gdepth; ++d) {
1080         PetscBool has;
1081 
1082         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1083         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1084       }
1085     }
1086     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1087     /* Put the output flag in the new label */
1088     if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1089     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1090     ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1091     ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1092   }
1093   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1098 {
1099   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1100   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1101   PetscBool      *isHybrid, *isHybridParallel;
1102   PetscInt        dim, depth, d;
1103   PetscInt        pStart, pEnd, pStartP, pEndP;
1104   PetscErrorCode  ierr;
1105 
1106   PetscFunctionBegin;
1107   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1108   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1109 
1110   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1111   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1112   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1113   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1114   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
1115   for (d = 0; d <= depth; d++) {
1116     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1117 
1118     if (hybridMax >= 0) {
1119       PetscInt sStart, sEnd, p;
1120 
1121       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1122       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1123     }
1124   }
1125   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1126   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1127   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1128   for (d = 0; d <= depth; d++) {
1129     PetscInt sStart, sEnd, p, dd;
1130 
1131     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1132     dd = (depth == 1 && d == 1) ? dim : d;
1133     for (p = sStart; p < sEnd; p++) {
1134       if (isHybridParallel[p-pStartP]) {
1135         pmesh->hybridPointMax[dd] = p;
1136         break;
1137       }
1138     }
1139   }
1140   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1145 {
1146   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1147   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1148   MPI_Comm        comm;
1149   DM              refTree;
1150   PetscSection    origParentSection, newParentSection;
1151   PetscInt        *origParents, *origChildIDs;
1152   PetscBool       flg;
1153   PetscErrorCode  ierr;
1154 
1155   PetscFunctionBegin;
1156   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1157   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1158   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1159 
1160   /* Set up tree */
1161   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1162   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1163   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1164   if (origParentSection) {
1165     PetscInt        pStart, pEnd;
1166     PetscInt        *newParents, *newChildIDs, *globParents;
1167     PetscInt        *remoteOffsetsParents, newParentSize;
1168     PetscSF         parentSF;
1169 
1170     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1171     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1172     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1173     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1174     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1175     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1176     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1177     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1178     if (original) {
1179       PetscInt numParents;
1180 
1181       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1182       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1183       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1184     }
1185     else {
1186       globParents = origParents;
1187     }
1188     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1189     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1190     if (original) {
1191       ierr = PetscFree(globParents);CHKERRQ(ierr);
1192     }
1193     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1194     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1195     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1196 #if defined(PETSC_USE_DEBUG)
1197     {
1198       PetscInt  p;
1199       PetscBool valid = PETSC_TRUE;
1200       for (p = 0; p < newParentSize; ++p) {
1201         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1202       }
1203       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1204     }
1205 #endif
1206     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1207     if (flg) {
1208       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1209       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1210       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1211       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1212       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1213     }
1214     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1215     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1216     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1217     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1218   }
1219   pmesh->useAnchors = mesh->useAnchors;
1220   PetscFunctionReturn(0);
1221 }
1222 
1223 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1224 {
1225   PetscMPIInt            rank, size;
1226   MPI_Comm               comm;
1227   PetscErrorCode         ierr;
1228 
1229   PetscFunctionBegin;
1230   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1231   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1232 
1233   /* Create point SF for parallel mesh */
1234   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1235   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1236   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1237   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1238   {
1239     const PetscInt *leaves;
1240     PetscSFNode    *remotePoints, *rowners, *lowners;
1241     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1242     PetscInt        pStart, pEnd;
1243 
1244     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1245     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1246     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1247     for (p=0; p<numRoots; p++) {
1248       rowners[p].rank  = -1;
1249       rowners[p].index = -1;
1250     }
1251     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1252     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1253     for (p = 0; p < numLeaves; ++p) {
1254       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1255         lowners[p].rank  = rank;
1256         lowners[p].index = leaves ? leaves[p] : p;
1257       } else if (lowners[p].rank >= 0) { /* Point already claimed by another process, so flag it so that MAXLOC ignores our entry */
1258         lowners[p].rank  = -2;
1259         lowners[p].index = -2;
1260       }
1261     }
1262     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1263       rowners[p].rank  = -3;
1264       rowners[p].index = -3;
1265     }
1266     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1267     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1268     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1269     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1270     for (p = 0; p < numLeaves; ++p) {
1271       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1272       if (lowners[p].rank != rank) ++numGhostPoints;
1273     }
1274     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1275     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1276     for (p = 0, gp = 0; p < numLeaves; ++p) {
1277       if (lowners[p].rank != rank) {
1278         ghostPoints[gp]        = leaves ? leaves[p] : p;
1279         remotePoints[gp].rank  = lowners[p].rank;
1280         remotePoints[gp].index = lowners[p].index;
1281         ++gp;
1282       }
1283     }
1284     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1285     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1286     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1287   }
1288   {
1289     PetscBool useCone, useClosure, useAnchors;
1290 
1291     ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
1292     ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
1293     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1294     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1295   }
1296   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1297   PetscFunctionReturn(0);
1298 }
1299 
1300 /*@
1301   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1302 
1303   Input Parameters:
1304 + dm - The DMPlex object
1305 - flg - Balance the partition?
1306 
1307   Level: intermediate
1308 
1309 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1310 @*/
1311 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1312 {
1313   DM_Plex *mesh = (DM_Plex *)dm->data;
1314 
1315   PetscFunctionBegin;
1316   mesh->partitionBalance = flg;
1317   PetscFunctionReturn(0);
1318 }
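/*
   Illustrative usage sketch (not part of the library source): turn on partition balancing
   on the serial mesh before distributing it; the flag is copied to the distributed mesh and
   used when its point SF is built.  Error checking is omitted and dm, dmDist, sfDist are
   hypothetical caller variables.

     DM      dmDist = NULL;
     PetscSF sfDist = NULL;

     DMPlexSetPartitionBalance(dm, PETSC_TRUE);
     DMPlexDistribute(dm, 0, &sfDist, &dmDist);
     if (dmDist) {
       DMDestroy(&dm);
       dm = dmDist;
     }
     PetscSFDestroy(&sfDist);
*/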
1319 
1320 /*@
1321   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1322 
1323   Input Parameter:
1324 . dm - The DMPlex object
1325 
1326   Output Parameter:
1327 . flg - Balance the partition?
1328 
1329   Level: intermediate
1330 
1331 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1332 @*/
1333 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1334 {
1335   DM_Plex *mesh = (DM_Plex *)dm->data;
1336 
1337   PetscFunctionBegin;
1338   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1339   PetscValidBoolPointer(flg, 2);
1340   *flg = mesh->partitionBalance;
1341   PetscFunctionReturn(0);
1342 }
1343 
1344 typedef struct {
1345   PetscInt vote, rank, index;
1346 } Petsc3Int;
1347 
1348 /* MaxLoc, but carry a third piece of information around */
1349 static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1350 {
1351   Petsc3Int *a = (Petsc3Int *)inout_;
1352   Petsc3Int *b = (Petsc3Int *)in_;
1353   PetscInt i, len = *len_;
1354   for (i = 0; i < len; i++) {
1355     if (a[i].vote < b[i].vote) {
1356       a[i].vote = b[i].vote;
1357       a[i].rank = b[i].rank;
1358       a[i].index = b[i].index;
1359     } else if (a[i].vote == b[i].vote) { /* tie on the vote: keep the entry with the lowest rank */
1360       if (a[i].rank >= b[i].rank) {
1361         a[i].rank = b[i].rank;
1362         a[i].index = b[i].index;
1363       }
1364     }
1365   }
1366 }
1367 
1368 /*@C
1369   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1370   Input Parameters:
1371 + dm          - The source DMPlex object
1372 . migrationSF - The star forest that describes the parallel point remapping
1373 - ownership   - Flag causing a vote to determine point ownership
1374 
1375   Output Parameter:
1376 . pointSF     - The star forest describing the point overlap in the remapped DM
1377 - pointSF     - The star forest describing the point overlap in the remapped DM
1378 
1379   Level: developer
1380 
1381 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1382 @*/
1383 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1384 {
1385   PetscMPIInt        rank, size;
1386   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1387   PetscInt          *pointLocal;
1388   const PetscInt    *leaves;
1389   const PetscSFNode *roots;
1390   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1391   Vec                shifts;
1392   const PetscInt     numShifts = 13759;
1393   const PetscScalar *shift = NULL;
1394   const PetscBool    shiftDebug = PETSC_FALSE;
1395   PetscBool          balance;
1396   PetscErrorCode     ierr;
1397 
1398   PetscFunctionBegin;
1399   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1400   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1401   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1402   ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1403 
1404   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1405   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1406   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1407   if (ownership) {
1408     MPI_Op       op;
1409     MPI_Datatype datatype;
1410     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1411     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1412     if (balance) {
1413       PetscRandom r;
1414 
1415       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1416       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1417       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1418       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1419       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1420       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1421       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1422       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1423     }
1424 
1425     ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
1426     ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
1427     /* Point ownership vote: the process with the highest (optionally shifted) rank claims each shared point; ties are broken in favor of the lowest rank */
1428     for (p = 0; p < nleaves; ++p) {
1429       if (shiftDebug) {
1430         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1431       }
1432       /* Each leaf submits a bid for ownership of its remote point */
1433       leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1434       leafVote[p].rank = rank;
1435       leafVote[p].index = p;
1436     }
1437     for (p = 0; p < nroots; p++) {
1438       /* Roots must not participate in the reduction; flag them so that the reduction ignores them */
1439       rootVote[p].vote  = -3;
1440       rootVote[p].rank  = -3;
1441       rootVote[p].index = -3;
1442     }
1443     ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRQ(ierr);
1444     ierr = MPI_Type_commit(&datatype);CHKERRQ(ierr);
1445     ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRQ(ierr);
1446     ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1447     ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1448     ierr = MPI_Op_free(&op);CHKERRQ(ierr);
1449     ierr = MPI_Type_free(&datatype);CHKERRQ(ierr);
1450     for (p = 0; p < nroots; p++) {
1451       rootNodes[p].rank = rootVote[p].rank;
1452       rootNodes[p].index = rootVote[p].index;
1453     }
1454     ierr = PetscFree(leafVote);CHKERRQ(ierr);
1455     ierr = PetscFree(rootVote);CHKERRQ(ierr);
1456   } else {
1457     for (p = 0; p < nroots; p++) {
1458       rootNodes[p].index = -1;
1459       rootNodes[p].rank = rank;
1460     }
1461     for (p = 0; p < nleaves; p++) {
1462       /* Write new local id into old location */
1463       if (roots[p].rank == rank) {
1464         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1465       }
1466     }
1467   }
1468   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1469   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1470 
1471   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1472     if (leafNodes[p].rank != rank) npointLeaves++;
1473   }
1474   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1475   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1476   for (idx = 0, p = 0; p < nleaves; p++) {
1477     if (leafNodes[p].rank != rank) {
1478       pointLocal[idx] = p;
1479       pointRemote[idx] = leafNodes[p];
1480       idx++;
1481     }
1482   }
1483   if (shift) {
1484     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1485     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1486   }
1487   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1488   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1489   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1490   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1491   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1492   ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1493   PetscFunctionReturn(0);
1494 }
1495 
1496 /*@C
1497   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1498 
1499   Collective on dm
1500 
1501   Input Parameters:
1502 + dm       - The source DMPlex object
1503 - sf       - The star forest communication context describing the migration pattern
1504 
1505   Output Parameter:
1506 . targetDM - The target DMPlex object
1507 
1508   Level: intermediate
1509 
1510 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1511 @*/
1512 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1513 {
1514   MPI_Comm               comm;
1515   PetscInt               dim, cdim, nroots;
1516   PetscSF                sfPoint;
1517   ISLocalToGlobalMapping ltogMigration;
1518   ISLocalToGlobalMapping ltogOriginal = NULL;
1519   PetscBool              flg;
1520   PetscErrorCode         ierr;
1521 
1522   PetscFunctionBegin;
1523   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1524   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1525   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1526   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1527   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1528   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1529   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1530 
1531   /* Check for a one-to-all distribution pattern */
1532   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1533   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1534   if (nroots >= 0) {
1535     IS        isOriginal;
1536     PetscInt  n, size, nleaves;
1537     PetscInt  *numbering_orig, *numbering_new;
1538 
1539     /* Get the original point numbering */
1540     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1541     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1542     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1543     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1544     /* Convert to positive global numbers */
1545     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1546     /* Derive the new local-to-global mapping from the old one */
1547     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1548     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1549     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1550     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1551     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1552     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1553     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1554   } else {
1555     /* One-to-all distribution pattern: We can derive LToG from SF */
1556     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1557   }
1558   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1559   if (flg) {
1560     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1561     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1562   }
1563   /* Migrate DM data to target DM */
1564   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1565   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1566   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1567   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1568   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1569   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1570   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1571   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1572   PetscFunctionReturn(0);
1573 }
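/*
   Illustrative usage sketch (not part of the library source): given an already constructed
   migration SF (the hypothetical name migrationSF below), migrate a mesh into a new DM and
   rebuild its point SF, mirroring the sequence used by DMPlexDistribute() and
   DMPlexGetRedundantDM() below.  Error checking is omitted.

     DM      dmTarget;
     PetscSF sfPoint;

     DMPlexCreate(PetscObjectComm((PetscObject) dm), &dmTarget);
     DMPlexMigrate(dm, migrationSF, dmTarget);
     DMPlexCreatePointSF(dmTarget, migrationSF, PETSC_FALSE, &sfPoint);
     DMSetPointSF(dmTarget, sfPoint);
     PetscSFDestroy(&sfPoint);
*/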
1574 
1575 /*@C
1576   DMPlexDistribute - Distributes the mesh and any associated sections.
1577 
1578   Collective on dm
1579 
1580   Input Parameters:
1581 + dm  - The original DMPlex object
1582 - overlap - The overlap of partitions, 0 is the default
1583 
1584   Output Parameters:
1585 + sf - The PetscSF used for point distribution, or NULL if not needed
1586 - dmParallel - The distributed DMPlex object
1587 
1588   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1589 
1590   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1591   representation on the mesh.
1592 
1593   Level: intermediate
1594 
1595 .seealso: DMPlexCreate(), DMSetAdjacency()
1596 @*/
1597 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1598 {
1599   MPI_Comm               comm;
1600   PetscPartitioner       partitioner;
1601   IS                     cellPart;
1602   PetscSection           cellPartSection;
1603   DM                     dmCoord;
1604   DMLabel                lblPartition, lblMigration;
1605   PetscSF                sfMigration, sfStratified, sfPoint;
1606   PetscBool              flg, balance;
1607   PetscMPIInt            rank, size;
1608   PetscErrorCode         ierr;
1609 
1610   PetscFunctionBegin;
1611   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1612   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1613   if (sf) PetscValidPointer(sf,3);
1614   PetscValidPointer(dmParallel,4);
1615 
1616   if (sf) *sf = NULL;
1617   *dmParallel = NULL;
1618   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1619   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1620   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1621   if (size == 1) PetscFunctionReturn(0);
1622 
1623   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1624   /* Create cell partition */
1625   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1626   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1627   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1628   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1629   ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
1630   {
1631     /* Convert partition to DMLabel */
1632     IS             is;
1633     PetscHSetI     ht;
1634     const PetscInt *points;
1635     PetscInt       *iranks;
1636     PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;
1637 
1638     ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
1639     /* Preallocate strata */
1640     ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1641     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1642     for (proc = pStart; proc < pEnd; proc++) {
1643       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1644       if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
1645     }
1646     ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
1647     ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
1648     ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
1649     ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1650     ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
1651     ierr = PetscFree(iranks);CHKERRQ(ierr);
1652     /* Inline DMPlexPartitionLabelClosure() */
1653     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1654     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1655     for (proc = pStart; proc < pEnd; proc++) {
1656       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1657       if (!npoints) continue;
1658       ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
1659       ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
1660       ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
1661       ierr = ISDestroy(&is);CHKERRQ(ierr);
1662     }
1663     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1664   }
1665   ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
1666 
1667   ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
1668   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
1669   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1670   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1671   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1672   sfMigration = sfStratified;
1673   ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
1674   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1675   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1676   if (flg) {
1677     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1678     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1679   }
1680 
1681   /* Create non-overlapping parallel DM and migrate internal data */
1682   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1683   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1684   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1685 
1686   /* Build the point SF without overlap */
1687   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1688   ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
1689   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1690   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1691   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1692   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1693   if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}
1694 
1695   if (overlap > 0) {
1696     DM                 dmOverlap;
1697     PetscInt           nroots, nleaves, noldleaves, l;
1698     const PetscInt    *oldLeaves;
1699     PetscSFNode       *newRemote, *permRemote;
1700     const PetscSFNode *oldRemote;
1701     PetscSF            sfOverlap, sfOverlapPoint;
1702 
1703     /* Add the partition overlap to the distributed DM */
1704     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1705     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1706     *dmParallel = dmOverlap;
1707     if (flg) {
1708       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1709       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1710     }
1711 
1712     /* Re-map the migration SF to establish the full migration pattern */
1713     ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
1714     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1715     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1716     /* oldRemote: original root point mapping to original leaf point
1717        newRemote: original leaf point mapping to overlapped leaf point */
1718     if (oldLeaves) {
1719       /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1720       ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
1721       for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1722       oldRemote = permRemote;
1723     }
1724     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1725     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1726     if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
1727     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1728     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1729     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1730     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1731     sfMigration = sfOverlapPoint;
1732   }
1733   /* Cleanup Partition */
1734   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1735   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1736   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1737   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1738   /* Copy BC */
1739   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1740   /* Create sfNatural */
1741   if (dm->useNatural) {
1742     PetscSection section;
1743 
1744     ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
1745     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1746     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1747   }
1748   /* Cleanup */
1749   if (sf) {*sf = sfMigration;}
1750   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1751   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1752   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1753   PetscFunctionReturn(0);
1754 }
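/*
   Illustrative usage sketch (not part of the library source): the common calling sequence
   for DMPlexDistribute(), replacing the serial mesh with the distributed one whenever a
   distribution actually took place (dmDist stays NULL on a single process).  Error checking
   is omitted.

     DM dmDist = NULL;

     DMPlexDistribute(dm, 0, NULL, &dmDist);
     if (dmDist) {
       DMDestroy(&dm);
       dm = dmDist;
     }
*/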
1755 
1756 /*@C
1757   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1758 
1759   Collective on dm
1760 
1761   Input Parameters:
1762 + dm  - The non-overlapping distributed DMPlex object
1763 - overlap - The overlap of partitions
1764 
1765   Output Parameters:
1766 + sf - The PetscSF used for point distribution
1767 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1768 
1769   Note: If the mesh was not distributed, the output dmOverlap will be NULL.
1770 
1771   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1772   representation on the mesh.
1773 
1774   Level: intermediate
1775 
1776 .seealso: DMPlexCreate(), DMSetAdjacency()
1777 @*/
1778 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1779 {
1780   MPI_Comm               comm;
1781   PetscMPIInt            size, rank;
1782   PetscSection           rootSection, leafSection;
1783   IS                     rootrank, leafrank;
1784   DM                     dmCoord;
1785   DMLabel                lblOverlap;
1786   PetscSF                sfOverlap, sfStratified, sfPoint;
1787   PetscErrorCode         ierr;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1791   if (sf) PetscValidPointer(sf, 3);
1792   PetscValidPointer(dmOverlap, 4);
1793 
1794   if (sf) *sf = NULL;
1795   *dmOverlap  = NULL;
1796   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1797   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1798   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1799   if (size == 1) PetscFunctionReturn(0);
1800 
1801   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1802   /* Compute point overlap with neighbouring processes on the distributed DM */
1803   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1804   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1805   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1806   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1807   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1808   /* Convert overlap label to stratified migration SF */
1809   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1810   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1811   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1812   sfOverlap = sfStratified;
1813   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1814   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1815 
1816   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1817   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1818   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1819   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1820   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1821 
1822   /* Build the overlapping DM */
1823   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1824   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1825   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1826   /* Build the new point SF */
1827   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1828   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1829   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1830   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1831   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1832   /* Cleanup overlap partition */
1833   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1834   if (sf) *sf = sfOverlap;
1835   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1836   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1837   PetscFunctionReturn(0);
1838 }
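/*
   Illustrative usage sketch (not part of the library source): add one layer of cell overlap
   to a mesh that was previously distributed without overlap.  Error checking is omitted.

     DM dmOverlap = NULL;

     DMPlexDistributeOverlap(dm, 1, NULL, &dmOverlap);
     if (dmOverlap) {
       DMDestroy(&dm);
       dm = dmOverlap;
     }
*/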
1839 
1840 /*@C
1841   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1842   root process of the original's communicator.
1843 
1844   Collective on dm
1845 
1846   Input Parameter:
1847 . dm - the original DMPlex object
1848 
1849   Output Parameters:
1850 + sf - the PetscSF used for point distribution (optional)
1851 - gatherMesh - the gathered DM object, or NULL
1852 
1853   Level: intermediate
1854 
1855 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1856 @*/
1857 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1858 {
1859   MPI_Comm       comm;
1860   PetscMPIInt    size;
1861   PetscPartitioner oldPart, gatherPart;
1862   PetscErrorCode ierr;
1863 
1864   PetscFunctionBegin;
1865   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1866   PetscValidPointer(gatherMesh,3);
1867   *gatherMesh = NULL;
1868   if (sf) *sf = NULL;
1869   comm = PetscObjectComm((PetscObject)dm);
1870   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1871   if (size == 1) PetscFunctionReturn(0);
1872   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1873   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1874   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1875   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1876   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1877   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1878 
1879   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1880   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1881   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1882   PetscFunctionReturn(0);
1883 }
1884 
1885 /*@C
1886   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1887 
1888   Collective on dm
1889 
1890   Input Parameter:
1891 . dm - the original DMPlex object
1892 
1893   Output Parameters:
1894 + sf - the PetscSF used for point distribution (optional)
1895 - redundantMesh - the redundant DM object, or NULL
1896 
1897   Level: intermediate
1898 
1899 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1900 @*/
1901 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1902 {
1903   MPI_Comm       comm;
1904   PetscMPIInt    size, rank;
1905   PetscInt       pStart, pEnd, p;
1906   PetscInt       numPoints = -1;
1907   PetscSF        migrationSF, sfPoint, gatherSF;
1908   DM             gatherDM, dmCoord;
1909   PetscSFNode    *points;
1910   PetscErrorCode ierr;
1911 
1912   PetscFunctionBegin;
1913   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1914   PetscValidPointer(redundantMesh,3);
1915   *redundantMesh = NULL;
1916   comm = PetscObjectComm((PetscObject)dm);
1917   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1918   if (size == 1) {
1919     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1920     *redundantMesh = dm;
1921     if (sf) *sf = NULL;
1922     PetscFunctionReturn(0);
1923   }
1924   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1925   if (!gatherDM) PetscFunctionReturn(0);
1926   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1927   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1928   numPoints = pEnd - pStart;
1929   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1930   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1931   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1932   for (p = 0; p < numPoints; p++) {
1933     points[p].index = p;
1934     points[p].rank  = 0;
1935   }
1936   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1937   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1938   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1939   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1940   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1941   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1942   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1943   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1944   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1945   if (sf) {
1946     PetscSF tsf;
1947 
1948     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1949     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1950     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1951   }
1952   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1953   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1954   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1955   PetscFunctionReturn(0);
1956 }
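/*
   Illustrative usage sketch (not part of the library source): obtain a full copy of the mesh
   on every process, e.g. for serial post-processing, and release it when done; destroying the
   copy simply drops a reference when the mesh was already serial.  Error checking is omitted.

     DM dmRedundant = NULL;

     DMPlexGetRedundantDM(dm, NULL, &dmRedundant);
     if (dmRedundant) {
       DMDestroy(&dmRedundant);
     }
*/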
1957