xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 6a98f8dc3f2c9149905a87dc2e9d0fedaf64e09a)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 - user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* DMPlexGetAdjacency_Internal - Compute the points adjacent to p, dispatching on the
   selected adjacency definition.

   Input Parameters:
   + dm                   - The DMPlex object
   . p                    - The mesh point
   . useCone              - Passed through to the cone/closure variants below
   . useTransitiveClosure - Use the transitive-closure (FEM-style) adjacency
   . useAnchors           - Replace constrained points by their anchor points
   . adjSize              - Capacity of *adj, or negative to use the computed bound
   - adj                  - Existing storage, or NULL to allocate (caller must PetscFree())

   On output *adjSize is the number of adjacent points stored in (*adj)[0..*adjSize-1]. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static cache of the allocation bound computed the last time this
     function allocated *adj; it is read below when *adjSize < 0 even on calls that
     did not set it -- presumably callers pass *adj == NULL on first use; confirm */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    /* Allocate worst-case storage: geometric series in the max cone/support sizes
       bounds the points reachable by depth alternating traversals, capped by the
       total number of points in the chart */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    depth = PetscMax(depth, -depth);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: a user callback overrides everything; otherwise choose the
     closure, cone, or support variant */
  if (mesh->useradjacency) {
    ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* In-place substitution: remove each constrained point from the adjacency and
       append its anchor points (deduplicated) at the end */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i]; /* NOTE(review): shadows the function argument p */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left one slot;
           i is NOT advanced, so the shifted-in point is examined next */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Append each anchor unless already present; the comma operator in the
             loop condition performs the append when q reaches numAdj.
             NOTE(review): the append happens before the capacity test below, so an
             overfull mesh writes one slot past maxAdjSize before erroring -- confirm */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
268 
269 /*@
270   DMPlexGetAdjacency - Return all points adjacent to the given point
271 
272   Input Parameters:
273 + dm - The DM object
274 . p  - The point
275 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
276 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
277 
278   Output Parameters:
279 + adjSize - The number of adjacent points
280 - adj - The adjacent points
281 
282   Level: advanced
283 
284   Notes:
285     The user must PetscFree the adj array if it was not passed in.
286 
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291   PetscBool      useCone, useClosure, useAnchors;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscValidPointer(adjSize,3);
297   PetscValidPointer(adj,4);
298   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on dm

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving, per root point, the number of edges into it
. rootRanks       - The rank of each edge into a root point
. leafRankSection - Section giving, per leaf point, the number of sharing processes
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;     /* one bit per rank: is that rank a neighbor? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  /* NOTE(review): PetscBTSet/PetscBTClear return values are unchecked here, unlike above */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: a rank is never its own neighbor */
  PetscBTClear(neighbors, rank);
  for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* One SF edge per neighbor: local point = neighbor rank, remote = (our rank, neighbor) */
  for(proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  /* Ownership of ranksNew/localPointsNew/remotePointsNew transfers to the created objects */
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
393 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree is exactly that count */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every process sends its own rank along the SF edges */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership transfers to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves: broadcast the per-root rank lists back out */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
448 
449 /*@C
450   DMPlexCreateOverlapLabel - Compute owner information for shared points. This basically gets two-sided for an SF.
451 
452   Collective on dm
453 
454   Input Parameters:
455 + dm          - The DM
456 . levels      - Number of overlap levels
457 . rootSection - The number of leaves for a given root point
458 . rootrank    - The rank of each edge into the root point
459 . leafSection - The number of processes sharing a given leaf point
460 - leafrank    - The rank of each process sharing a leaf point
461 
462   Output Parameter:
463 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
464 
465   Level: developer
466 
467 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
468 @*/
469 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
470 {
471   MPI_Comm           comm;
472   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
473   PetscSF            sfPoint;
474   const PetscSFNode *remote;
475   const PetscInt    *local;
476   const PetscInt    *nrank, *rrank;
477   PetscInt          *adj = NULL;
478   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
479   PetscMPIInt        rank, size;
480   PetscBool          flg;
481   PetscErrorCode     ierr;
482 
483   PetscFunctionBegin;
484   *ovLabel = NULL;
485   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
486   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
487   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
488   if (size == 1) PetscFunctionReturn(0);
489   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
490   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
491   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
492   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
493   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
494   /* Handle leaves: shared with the root point */
495   for (l = 0; l < nleaves; ++l) {
496     PetscInt adjSize = PETSC_DETERMINE, a;
497 
498     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
499     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
500   }
501   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
502   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
503   /* Handle roots */
504   for (p = pStart; p < pEnd; ++p) {
505     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
506 
507     if ((p >= sStart) && (p < sEnd)) {
508       /* Some leaves share a root with other leaves on different processes */
509       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
510       if (neighbors) {
511         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
512         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
513         for (n = 0; n < neighbors; ++n) {
514           const PetscInt remoteRank = nrank[noff+n];
515 
516           if (remoteRank == rank) continue;
517           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
518         }
519       }
520     }
521     /* Roots are shared with leaves */
522     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
523     if (!neighbors) continue;
524     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
525     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
526     for (n = 0; n < neighbors; ++n) {
527       const PetscInt remoteRank = rrank[noff+n];
528 
529       if (remoteRank == rank) continue;
530       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
531     }
532   }
533   ierr = PetscFree(adj);CHKERRQ(ierr);
534   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
535   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
536   /* Add additional overlap levels */
537   for (l = 1; l < levels; l++) {
538     /* Propagate point donations over SF to capture remote connections */
539     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
540     /* Add next level of point donations to the label */
541     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
542   }
543   /* We require the closure in the overlap */
544   ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     PetscViewer viewer;
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
549     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
550   }
551   /* Invert sender to receiver label */
552   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
553   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
554   /* Add owned points, except for shared local points */
555   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
556   for (l = 0; l < nleaves; ++l) {
557     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
558     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
559   }
560   /* Clean up */
561   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
562   PetscFunctionReturn(0);
563 }
564 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets.
     NOTE(review): strata are indexed 0..dim, which assumes depth == dim
     (an interpolated mesh) -- confirm callers guarantee this */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  /* Pull the depth of each incoming overlap point from its owner */
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Shifts accumulate so that the received points of the strata ordered
     cells (dim), vertices (0), then intermediate strata are inserted after
     the local points of each stratum */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      /* Advance depthIdx past the local points so received points land after them */
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Already-shared points keep pointing at their existing remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended per stratum via depthIdx */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership transfers to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
675 
/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.

  Input Parameters:
+ dm          - The DM
- sf          - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Note:
  This lexicographically sorts by (depth, cellType)

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
  PetscSFNode       *pointDepths, *remoteDepths; /* per-point (depth, cell type), packed into PetscSFNode as (index, rank) */
  PetscInt          *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt          *ctRecv,    *ctShift,    *ctIdx;
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Empty ranks report depth -1; all non-empty ranks must agree on the global depth */
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
  ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag every root point with its (depth, cell type) so both travel together in one bcast */
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      DMPolytopeType ct;

      ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
      pointDepths[p].index = d;
      pointDepths[p].rank  = ct;
    }
  }
  /* Default (-1,-1): a leaf that ends up with no cell type is binned by depth below
     (NOTE(review): after the bcast every leaf should carry its root's data; the default
     looks defensive -- confirm against PetscSFBcast semantics) */
  for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
  ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    if (remoteDepths[p].rank < 0) {
      ++depthRecv[remoteDepths[p].index];
    } else {
      ++ctRecv[remoteDepths[p].rank];
    }
  }
  {
    PetscInt depths[4], dims[4], shift = 0, i, c;

    /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1) */
    depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
    dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
    for (i = 0; i <= depth; ++i) {
      const PetscInt dep = depths[i];
      const PetscInt dim = dims[i];

      for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
        /* Group cell types of this dimension; FV ghost cells are placed with the cells (i == 0) */
        if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && c == DM_POLYTOPE_FV_GHOST)) continue;
        ctShift[c] = shift;
        shift     += ctRecv[c];
      }
      /* Points of this depth that carried no cell type come after the typed ones */
      depthShift[dep] = shift;
      shift          += depthRecv[dep];
    }
    /* Remaining cell types (dimension outside [0, dim]; FV ghosts were already placed) go last */
    for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
      const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);

      if ((ctDim < 0 || ctDim > dim) && c != DM_POLYTOPE_FV_GHOST) {
        ctShift[c] = shift;
        shift     += ctRecv[c];
      }
    }
  }
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt       dep = remoteDepths[p].index;
    const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;

    if ((PetscInt) ct < 0) {
      ilocal[p] = depthShift[dep] + depthIdx[dep];
      ++depthIdx[dep];
    } else {
      ilocal[p] = ctShift[ct] + ctIdx[ct];
      ++ctIdx[ct];
    }
  }
  /* Same roots/remotes as the input sf, but leaves land at the stratified local indices */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
790 
791 /*@
792   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
793 
794   Collective on dm
795 
796   Input Parameters:
797 + dm - The DMPlex object
798 . pointSF - The PetscSF describing the communication pattern
799 . originalSection - The PetscSection for existing data layout
800 - originalVec - The existing data in a local vector
801 
802   Output Parameters:
803 + newSection - The PetscSF describing the new data layout
804 - newVec - The new data in a local vector
805 
806   Level: developer
807 
808 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
809 @*/
810 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
811 {
812   PetscSF        fieldSF;
813   PetscInt      *remoteOffsets, fieldSize;
814   PetscScalar   *originalValues, *newValues;
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
819   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
820 
821   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
822   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
823   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
824 
825   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
826   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
827   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
828   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
829   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
830   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
831   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
832   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
833   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
834   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 /*@
839   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
840 
841   Collective on dm
842 
843   Input Parameters:
844 + dm - The DMPlex object
845 . pointSF - The PetscSF describing the communication pattern
846 . originalSection - The PetscSection for existing data layout
847 - originalIS - The existing data
848 
849   Output Parameters:
850 + newSection - The PetscSF describing the new data layout
851 - newIS - The new data
852 
853   Level: developer
854 
855 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
856 @*/
857 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
858 {
859   PetscSF         fieldSF;
860   PetscInt       *newValues, *remoteOffsets, fieldSize;
861   const PetscInt *originalValues;
862   PetscErrorCode  ierr;
863 
864   PetscFunctionBegin;
865   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
866   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
867 
868   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
869   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
870 
871   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
872   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
873   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
874   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
875   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
876   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
877   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
878   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
879   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 /*@
884   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
885 
886   Collective on dm
887 
888   Input Parameters:
889 + dm - The DMPlex object
890 . pointSF - The PetscSF describing the communication pattern
891 . originalSection - The PetscSection for existing data layout
892 . datatype - The type of data
893 - originalData - The existing data
894 
895   Output Parameters:
896 + newSection - The PetscSection describing the new data layout
897 - newData - The new data
898 
899   Level: developer
900 
901 .seealso: DMPlexDistribute(), DMPlexDistributeField()
902 @*/
903 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
904 {
905   PetscSF        fieldSF;
906   PetscInt      *remoteOffsets, fieldSize;
907   PetscMPIInt    dataSize;
908   PetscErrorCode ierr;
909 
910   PetscFunctionBegin;
911   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
912   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
913 
914   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
915   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
916   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
917 
918   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
919   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
920   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
921   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
922   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
923   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
/* Migrate the cone section, cone points, and cone orientations from dm to dmParallel along
   migrationSF, renumber the received cone points into the new local numbering, then
   symmetrize and stratify the parallel mesh.
   - original:    optional local-to-global map for the source points (NULL if cones are already global)
   - renumbering: global-to-local map defining the new local numbering on dmParallel */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  /* Track the largest cone so later traversals can size work arrays */
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Convert cone entries to global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  /* Map received global cone points back to the new local numbering; misses become -1 (masked) */
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* Every cone point must have been found locally; a -1 means the overlap SF is incomplete */
  {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations need no renumbering; reuse the same dof-level SF */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  /* Carry the adjacency configuration over to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1021 
/* Migrate the local coordinate section and vector from dm to dmParallel along migrationSF,
   and copy over the periodicity description. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Local coordinate vectors are serial (one per rank), hence PETSC_COMM_SELF */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size (spatial dimension) of the coordinate vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Periodicity metadata is rank-independent, so it is copied directly */
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1057 
/* Migrate all labels from dm to dmParallel along migrationSF.
   The "depth" label is normally recreated by stratification and therefore skipped; it is
   only communicated when the user has modified it, which is detected by comparing the
   label's current object state with the state recorded in the mesh. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  /* If any rank modified its depth label, all ranks must send it */
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the automatically created depth label on dmParallel so the sent one replaces it */
    ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
    ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 always has hasLabels (numLabels came from it), so its isDepth is well-defined here */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1130 
/* Migrate the reference tree and the parent/child (non-conforming mesh) relationships from
   dm to dmParallel, renumbering parent points into the new local numbering.
   - original:    optional local-to-global map for the source points (NULL if already global)
   - renumbering: global-to-local map defining the new local numbering on dmParallel */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    /* Migrate the parent section, then expand the point SF to a dof-level SF for the data */
    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    if (original) {
      /* Convert parent points to global numbering before sending */
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    }
    else {
      globParents = origParents;
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    /* Child IDs are reference-tree indices; they need no renumbering */
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    /* Map received global parents back to the new local numbering; misses become -1 (masked) */
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
#endif
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1209 
/* Build the point SF (the 2-sided ghost-point overlap) of dmParallel from the migration SF.
   Points received by more than one rank are claimed through a MAXLOC reduction over
   (rank, index) pairs: the highest-ranked bidder wins, and everyone else records the
   winner as the remote owner of its ghost copy. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Start with no owner anywhere; the initial bcast spreads (-1,-1) to all leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag so that MAXLOC does not use them */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* MAXLOC over (rank, index): the highest bidding rank wins ownership of each root */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    /* Broadcast the winners back so every leaf learns its owner */
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points owned elsewhere become ghost leaves of the new point SF */
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Carry the adjacency configuration over to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1286 
1287 /*@
1288   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1289 
1290   Input Parameters:
1291 + dm - The DMPlex object
1292 - flg - Balance the partition?
1293 
1294   Level: intermediate
1295 
1296 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1297 @*/
1298 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1299 {
1300   DM_Plex *mesh = (DM_Plex *)dm->data;
1301 
1302   PetscFunctionBegin;
1303   mesh->partitionBalance = flg;
1304   PetscFunctionReturn(0);
1305 }
1306 
1307 /*@
1308   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1309 
1310   Input Parameter:
1311 . dm - The DMPlex object
1312 
1313   Output Parameter:
1314 . flg - Balance the partition?
1315 
1316   Level: intermediate
1317 
1318 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1319 @*/
1320 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1321 {
1322   DM_Plex *mesh = (DM_Plex *)dm->data;
1323 
1324   PetscFunctionBegin;
1325   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1326   PetscValidBoolPointer(flg, 2);
1327   *flg = mesh->partitionBalance;
1328   PetscFunctionReturn(0);
1329 }
1330 
/* Triple used when voting on shared-point ownership: the vote value plus the
   (rank, index) payload that is carried along with the winning vote */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1334 
1335 /* MaxLoc, but carry a third piece of information around */
1336 static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1337 {
1338   Petsc3Int *a = (Petsc3Int *)inout_;
1339   Petsc3Int *b = (Petsc3Int *)in_;
1340   PetscInt i, len = *len_;
1341   for (i = 0; i < len; i++) {
1342     if (a[i].vote < b[i].vote) {
1343       a[i].vote = b[i].vote;
1344       a[i].rank = b[i].rank;
1345       a[i].index = b[i].index;
1346     } else if (a[i].vote <= b[i].vote) {
1347       if (a[i].rank >= b[i].rank) {
1348         a[i].rank = b[i].rank;
1349         a[i].index = b[i].index;
1350       }
1351     }
1352   }
1353 }
1354 
1355 /*@C
1356   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1357 
1358   Input Parameters:
1359 + dm          - The source DMPlex object
1360 . migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership
1362 
1363   Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1365 
1366   Notes:
1367   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1368 
1369   Level: developer
1370 
1371 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1372 @*/
1373 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1374 {
1375   PetscMPIInt        rank, size;
1376   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1377   PetscInt          *pointLocal;
1378   const PetscInt    *leaves;
1379   const PetscSFNode *roots;
1380   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1381   Vec                shifts;
1382   const PetscInt     numShifts = 13759;
1383   const PetscScalar *shift = NULL;
1384   const PetscBool    shiftDebug = PETSC_FALSE;
1385   PetscBool          balance;
1386   PetscErrorCode     ierr;
1387 
1388   PetscFunctionBegin;
1389   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1390   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1391   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1392   ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1393 
1394   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1395   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1396   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1397   if (ownership) {
1398     MPI_Op       op;
1399     MPI_Datatype datatype;
1400     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1401     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1402     if (balance) {
1403       PetscRandom r;
1404 
1405       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1406       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1407       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1408       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1409       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1410       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1411       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1412       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1413     }
1414 
1415     ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
1416     ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
1417     /* Point ownership vote: Process with highest rank owns shared points */
1418     for (p = 0; p < nleaves; ++p) {
1419       if (shiftDebug) {
1420         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1421       }
1422       /* Either put in a bid or we know we own it */
1423       leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1424       leafVote[p].rank = rank;
1425       leafVote[p].index = p;
1426     }
1427     for (p = 0; p < nroots; p++) {
1428       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1429       rootVote[p].vote  = -3;
1430       rootVote[p].rank  = -3;
1431       rootVote[p].index = -3;
1432     }
1433     ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRQ(ierr);
1434     ierr = MPI_Type_commit(&datatype);CHKERRQ(ierr);
1435     ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRQ(ierr);
1436     ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1437     ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1438     ierr = MPI_Op_free(&op);CHKERRQ(ierr);
1439     ierr = MPI_Type_free(&datatype);CHKERRQ(ierr);
1440     for (p = 0; p < nroots; p++) {
1441       rootNodes[p].rank = rootVote[p].rank;
1442       rootNodes[p].index = rootVote[p].index;
1443     }
1444     ierr = PetscFree(leafVote);CHKERRQ(ierr);
1445     ierr = PetscFree(rootVote);CHKERRQ(ierr);
1446   } else {
1447     for (p = 0; p < nroots; p++) {
1448       rootNodes[p].index = -1;
1449       rootNodes[p].rank = rank;
1450     }
1451     for (p = 0; p < nleaves; p++) {
1452       /* Write new local id into old location */
1453       if (roots[p].rank == rank) {
1454         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1455       }
1456     }
1457   }
1458   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1459   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1460 
1461   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1462     if (leafNodes[p].rank != rank) npointLeaves++;
1463   }
1464   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1465   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1466   for (idx = 0, p = 0; p < nleaves; p++) {
1467     if (leafNodes[p].rank != rank) {
1468       /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1469       pointLocal[idx] = p;
1470       pointRemote[idx] = leafNodes[p];
1471       idx++;
1472     }
1473   }
1474   if (shift) {
1475     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1476     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1477   }
1478   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1479   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1480   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1481   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1482   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1483   ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1484   PetscFunctionReturn(0);
1485 }
1486 
1487 /*@C
1488   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1489 
1490   Collective on dm
1491 
1492   Input Parameters:
1493 + dm       - The source DMPlex object
1494 . sf       - The star forest communication context describing the migration pattern
1495 
1496   Output Parameter:
1497 - targetDM - The target DMPlex object
1498 
1499   Level: intermediate
1500 
1501 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1502 @*/
1503 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1504 {
1505   MPI_Comm               comm;
1506   PetscInt               dim, cdim, nroots;
1507   PetscSF                sfPoint;
1508   ISLocalToGlobalMapping ltogMigration;
1509   ISLocalToGlobalMapping ltogOriginal = NULL;
1510   PetscBool              flg;
1511   PetscErrorCode         ierr;
1512 
1513   PetscFunctionBegin;
1514   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1515   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1516   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1517   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1518   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1519   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1520   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1521 
1522   /* Check for a one-to-all distribution pattern */
1523   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1524   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1525   if (nroots >= 0) {
1526     IS        isOriginal;
1527     PetscInt  n, size, nleaves;
1528     PetscInt  *numbering_orig, *numbering_new;
1529 
1530     /* Get the original point numbering */
1531     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1532     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1533     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1534     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1535     /* Convert to positive global numbers */
1536     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1537     /* Derive the new local-to-global mapping from the old one */
1538     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1539     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1540     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1541     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1542     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1543     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1544     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1545   } else {
1546     /* One-to-all distribution pattern: We can derive LToG from SF */
1547     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1548   }
1549   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1550   if (flg) {
1551     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1552     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1553   }
1554   /* Migrate DM data to target DM */
1555   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1556   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1557   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1558   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1559   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1560   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1561   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1562   PetscFunctionReturn(0);
1563 }
1564 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Serial runs have nothing to distribute: outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Collect the set of target ranks that actually receive points */
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure() */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* For each target rank, label the transitive closure of its assigned cells */
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);

  /* Invert the partition label into a migration label and build the (stratified) migration SF */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    /* oldRemote aliases permRemote here, so this frees the permuted copy only */
    if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: ownership of sfMigration transfers to the caller when sf was requested */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1745 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions (the same on all ranks)

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Notes:
  If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: advanced

.seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* A serial mesh has no partition boundary, hence no overlap to add */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* Ownership/overlap scratch data is no longer needed once the SF exists */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Store the overlap in the new DM: overlaps accumulate across repeated calls */
  ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
  /* Build the new point SF (no ownership vote: ownership is inherited from dm) */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition; sfOverlap transfers to the caller when requested */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1833 
1834 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1835 {
1836   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1837 
1838   PetscFunctionBegin;
1839   *overlap = mesh->overlap;
1840   PetscFunctionReturn(0);
1841 }
1842 
/*@
  DMPlexGetOverlap - Get the DMPlex partition overlap.

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. overlap - The overlap of this DM

  Level: intermediate

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
@*/
PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Dispatch to the type-specific implementation (DMPlexGetOverlap_Plex) */
  ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1867 
1868 
1869 /*@C
1870   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1871   root process of the original's communicator.
1872 
1873   Collective on dm
1874 
1875   Input Parameter:
1876 . dm - the original DMPlex object
1877 
1878   Output Parameters:
1879 + sf - the PetscSF used for point distribution (optional)
1880 - gatherMesh - the gathered DM object, or NULL
1881 
1882   Level: intermediate
1883 
1884 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1885 @*/
1886 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1887 {
1888   MPI_Comm       comm;
1889   PetscMPIInt    size;
1890   PetscPartitioner oldPart, gatherPart;
1891   PetscErrorCode ierr;
1892 
1893   PetscFunctionBegin;
1894   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1895   PetscValidPointer(gatherMesh,2);
1896   *gatherMesh = NULL;
1897   if (sf) *sf = NULL;
1898   comm = PetscObjectComm((PetscObject)dm);
1899   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1900   if (size == 1) PetscFunctionReturn(0);
1901   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1902   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1903   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1904   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1905   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1906   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1907 
1908   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1909   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1910   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1911   PetscFunctionReturn(0);
1912 }
1913 
1914 /*@C
1915   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1916 
1917   Collective on dm
1918 
1919   Input Parameter:
1920 . dm - the original DMPlex object
1921 
1922   Output Parameters:
1923 + sf - the PetscSF used for point distribution (optional)
1924 - redundantMesh - the redundant DM object, or NULL
1925 
1926   Level: intermediate
1927 
1928 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1929 @*/
1930 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1931 {
1932   MPI_Comm       comm;
1933   PetscMPIInt    size, rank;
1934   PetscInt       pStart, pEnd, p;
1935   PetscInt       numPoints = -1;
1936   PetscSF        migrationSF, sfPoint, gatherSF;
1937   DM             gatherDM, dmCoord;
1938   PetscSFNode    *points;
1939   PetscErrorCode ierr;
1940 
1941   PetscFunctionBegin;
1942   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1943   PetscValidPointer(redundantMesh,2);
1944   *redundantMesh = NULL;
1945   comm = PetscObjectComm((PetscObject)dm);
1946   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1947   if (size == 1) {
1948     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1949     *redundantMesh = dm;
1950     if (sf) *sf = NULL;
1951     PetscFunctionReturn(0);
1952   }
1953   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1954   if (!gatherDM) PetscFunctionReturn(0);
1955   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1956   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1957   numPoints = pEnd - pStart;
1958   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1959   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1960   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1961   for (p = 0; p < numPoints; p++) {
1962     points[p].index = p;
1963     points[p].rank  = 0;
1964   }
1965   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1966   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1967   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1968   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1969   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1970   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1971   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1972   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1973   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1974   if (sf) {
1975     PetscSF tsf;
1976 
1977     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1978     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1979     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1980   }
1981   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1982   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1983   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1984   PetscFunctionReturn(0);
1985 }
1986 
1987 /*@
1988   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
1989 
1990   Collective
1991 
1992   Input Parameter:
1993 . dm      - The DM object
1994 
1995   Output Parameter:
1996 . distributed - Flag whether the DM is distributed
1997 
1998   Level: intermediate
1999 
2000   Notes:
2001   This currently finds out whether at least two ranks have any DAG points.
2002   This involves MPI_Allreduce() with one integer.
2003   The result is currently not stashed so every call to this routine involves this global communication.
2004 
2005 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2006 @*/
2007 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2008 {
2009   PetscInt          pStart, pEnd, count;
2010   MPI_Comm          comm;
2011   PetscErrorCode    ierr;
2012 
2013   PetscFunctionBegin;
2014   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2015   PetscValidPointer(distributed,2);
2016   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2017   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2018   count = !!(pEnd - pStart);
2019   ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2020   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2021   PetscFunctionReturn(0);
2022 }
2023