xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 415256461b522c4cec96a45e078600d6b790e19c)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
+ user    - The user callback, or NULL if none was set
- ctx     - context for callback evaluation, or NULL
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/*
  DMPlexGetAdjacency_Internal - Compute the adjacency of point p according to the
  useCone/useTransitiveClosure flags (or the user callback, which overrides both),
  optionally substituting anchor points for constrained points.

  If *adj is NULL on entry, an array large enough for any point of this mesh is
  allocated (the caller must PetscFree() it).  On entry *adjSize is the capacity of
  *adj, or negative to use the computed worst-case size; on exit it is the number of
  adjacent points found.
*/
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* Worst-case adjacency size computed on the last allocating call.
     NOTE(review): static state is not thread-safe, and is stale if a caller passes a
     pre-allocated *adj with *adjSize < 0 for a different DM -- confirm callers' usage */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* aSec may come back NULL when no anchors are set; anchors is only read when aSec is non-NULL */
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Size the array for the worst case: geometric series in the maximum cone and
       support sizes over the mesh depth, scaled by the anchor fan-out and capped by
       the total number of points in the chart */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback overrides all built-in adjacency strategies */
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    /* Replace each constrained (anchored) point in the list by its anchors:
       remove the point by shifting the tail left one slot, then append its
       anchors (deduplicated) at the end.  i is only advanced when no removal
       happened, so the shifted-in element is examined too. */
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove orig[i] by compacting the live portion of the list */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Append anchor unless already present (same dedup idiom as the
             adjacency kernels above) */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
267 
268 /*@
269   DMPlexGetAdjacency - Return all points adjacent to the given point
270 
271   Input Parameters:
272 + dm - The DM object
273 . p  - The point
274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276 
277   Output Parameters:
278 + adjSize - The number of adjacent points
279 - adj - The adjacent points
280 
281   Level: advanced
282 
283   Notes:
284     The user must PetscFree the adj array if it was not passed in.
285 
286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287 @*/
288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289 {
290   PetscBool      useCone, useClosure, useAnchors;
291   PetscErrorCode ierr;
292 
293   PetscFunctionBeginHot;
294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295   PetscValidPointer(adjSize,3);
296   PetscValidPointer(adj,4);
297   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300   PetscFunctionReturn(0);
301 }
302 
303 /*@
304   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305 
306   Collective on DM
307 
  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of shared ranks per root point
. rootRanks       - The leaf process ranks attached to each root point
. leafRankSection - Section giving the number of shared ranks per leaf point
- leafRanks       - The process ranks sharing each leaf point
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330   PetscErrorCode     ierr;
331 
332   PetscFunctionBegin;
333   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335   if (processRanks) {PetscValidPointer(processRanks, 3);}
336   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
337   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342   /* Compute root-to-leaf process connectivity */
343   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345   for (p = pStart; p < pEnd; ++p) {
346     PetscInt ndof, noff, n;
347 
348     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351   }
352   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353   /* Compute leaf-to-neighbor process connectivity */
354   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356   for (p = pStart; p < pEnd; ++p) {
357     PetscInt ndof, noff, n;
358 
359     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362   }
363   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364   /* Compute leaf-to-root process connectivity */
365   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366   /* Calculate edges */
367   PetscBTClear(neighbors, rank);
368   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372   for(proc = 0, n = 0; proc < size; ++proc) {
373     if (PetscBTLookup(neighbors, proc)) {
374       ranksNew[n]              = proc;
375       localPointsNew[n]        = proc;
376       remotePointsNew[n].index = rank;
377       remotePointsNew[n].rank  = proc;
378       ++n;
379     }
380   }
381   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384   if (sfProcess) {
385     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389   }
390   PetscFunctionReturn(0);
391 }
392 
393 /*@
394   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
395 
396   Collective on DM
397 
398   Input Parameter:
399 . dm - The DM
400 
401   Output Parameters:
402 + rootSection - The number of leaves for a given root point
403 . rootrank    - The rank of each edge into the root point
404 . leafSection - The number of processes sharing a given leaf point
405 - leafrank    - The rank of each process sharing a leaf point
406 
407   Level: developer
408 
409 .seealso: DMPlexCreateOverlap()
410 @*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree becomes the dof count
     of rootSection at each point */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends its own rank, so each root
     collects the ranks of all processes that reference it */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS (PETSC_OWN_POINTER): do not free it here */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves, filling leafSection/leafrank */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
447 
448 /*@C
  DMPlexCreateOverlap - Compute a DMLabel marking, with destination ranks, the points that each process contributes to the requested overlap
450 
451   Collective on DM
452 
453   Input Parameters:
454 + dm          - The DM
455 . levels      - Number of overlap levels
456 . rootSection - The number of leaves for a given root point
457 . rootrank    - The rank of each edge into the root point
458 . leafSection - The number of processes sharing a given leaf point
459 - leafrank    - The rank of each process sharing a leaf point
460 
  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
463 
464   Level: developer
465 
466 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467 @*/
468 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469 {
470   MPI_Comm           comm;
471   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472   PetscSF            sfPoint, sfProc;
473   const PetscSFNode *remote;
474   const PetscInt    *local;
475   const PetscInt    *nrank, *rrank;
476   PetscInt          *adj = NULL;
477   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478   PetscMPIInt        rank, size;
479   PetscBool          useCone, useClosure, flg;
480   PetscErrorCode     ierr;
481 
482   PetscFunctionBegin;
483   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491   /* Handle leaves: shared with the root point */
492   for (l = 0; l < nleaves; ++l) {
493     PetscInt adjSize = PETSC_DETERMINE, a;
494 
495     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497   }
498   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500   /* Handle roots */
501   for (p = pStart; p < pEnd; ++p) {
502     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503 
504     if ((p >= sStart) && (p < sEnd)) {
505       /* Some leaves share a root with other leaves on different processes */
506       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
507       if (neighbors) {
508         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
509         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
510         for (n = 0; n < neighbors; ++n) {
511           const PetscInt remoteRank = nrank[noff+n];
512 
513           if (remoteRank == rank) continue;
514           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
515         }
516       }
517     }
518     /* Roots are shared with leaves */
519     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
520     if (!neighbors) continue;
521     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
522     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523     for (n = 0; n < neighbors; ++n) {
524       const PetscInt remoteRank = rrank[noff+n];
525 
526       if (remoteRank == rank) continue;
527       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528     }
529   }
530   ierr = PetscFree(adj);CHKERRQ(ierr);
531   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
532   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
533   /* Add additional overlap levels */
534   for (l = 1; l < levels; l++) {
535     /* Propagate point donations over SF to capture remote connections */
536     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
537     /* Add next level of point donations to the label */
538     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
539   }
540   /* We require the closure in the overlap */
541   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
542   if (useCone || !useClosure) {
543     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
544   }
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
548   }
549   /* Make global process SF and invert sender to receiver label */
550   {
551     /* Build a global process SF */
552     PetscSFNode *remoteProc;
553     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
554     for (p = 0; p < size; ++p) {
555       remoteProc[p].rank  = p;
556       remoteProc[p].index = rank;
557     }
558     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
559     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
560     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
561   }
562   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
563   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
564   /* Add owned points, except for shared local points */
565   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
566   for (l = 0; l < nleaves; ++l) {
567     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
568     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
569   }
570   /* Clean up */
571   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
572   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
573   PetscFunctionReturn(0);
574 }
575 
576 /*@C
577   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
578 
579   Collective on DM
580 
581   Input Parameters:
582 + dm          - The DM
583 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
584 
  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations
587 
588   Level: developer
589 
590 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
591 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     record the depth of every local point and broadcast it to the overlap leaves */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): the shift arithmetic below appears to interleave received points
     after the local points of each stratum in the order cells, vertices, then the
     intermediate strata -- confirm against DMPlexStratify ordering */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local points
     plus the incoming overlap points, renumbered by the shifted strata */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so ghosts keep pointing
     at their true owners */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended after the local points of the
     matching stratum (depthIdx tracks the next free slot per depth) */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership passes to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
687 /*@
688   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
689 
  Input Parameters:
691 + dm          - The DM
692 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
693 
694   Output Parameter:
695 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
696 
697   Level: developer
698 
699 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
700 @*/
701 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
702 {
703   MPI_Comm           comm;
704   PetscMPIInt        rank, size;
705   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
706   PetscInt          *pointDepths, *remoteDepths, *ilocal;
707   PetscInt          *depthRecv, *depthShift, *depthIdx;
708   PetscInt           hybEnd[4];
709   const PetscSFNode *iremote;
710   PetscErrorCode     ierr;
711 
712   PetscFunctionBegin;
713   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
714   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
715   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
716   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
717   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
718   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
719   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
720 
721   /* Before building the migration SF we need to know the new stratum offsets */
722   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
723   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
724   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
725   for (d = 0; d < depth+1; ++d) {
726     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
727     for (p = pStart; p < pEnd; ++p) {
728       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
729         pointDepths[p] = 2 * d;
730       } else {
731         pointDepths[p] = 2 * d + 1;
732       }
733     }
734   }
735   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
736   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
737   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
738   /* Count recevied points in each stratum and compute the internal strata shift */
739   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
740   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
741   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
742   depthShift[2*depth+1] = 0;
743   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
744   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
745   depthShift[0] += depthRecv[1];
746   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
747   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
748   for (d = 2 * depth-1; d > 2; --d) {
749     PetscInt e;
750 
751     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
752   }
753   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
754   /* Derive a new local permutation based on stratified indices */
755   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
756   for (p = 0; p < nleaves; ++p) {
757     const PetscInt dep = remoteDepths[p];
758 
759     ilocal[p] = depthShift[dep] + depthIdx[dep];
760     depthIdx[dep]++;
761   }
762   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
763   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
764   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
765   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
766   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
767   PetscFunctionReturn(0);
768 }
769 
770 /*@
771   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
772 
773   Collective on DM
774 
775   Input Parameters:
776 + dm - The DMPlex object
777 . pointSF - The PetscSF describing the communication pattern
778 . originalSection - The PetscSection for existing data layout
779 - originalVec - The existing data
780 
781   Output Parameters:
782 + newSection - The PetscSF describing the new data layout
783 - newVec - The new data
784 
785   Level: developer
786 
787 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
788 @*/
789 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
790 {
791   PetscSF        fieldSF;
792   PetscInt      *remoteOffsets, fieldSize;
793   PetscScalar   *originalValues, *newValues;
794   PetscErrorCode ierr;
795 
796   PetscFunctionBegin;
797   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
798   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
799 
800   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
801   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
802   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
803 
804   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
805   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
806   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
807   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
808   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
809   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
810   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
811   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
812   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
813   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 /*@
818   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
819 
820   Collective on DM
821 
822   Input Parameters:
823 + dm - The DMPlex object
824 . pointSF - The PetscSF describing the communication pattern
825 . originalSection - The PetscSection for existing data layout
826 - originalIS - The existing data
827 
828   Output Parameters:
829 + newSection - The PetscSF describing the new data layout
830 - newIS - The new data
831 
832   Level: developer
833 
834 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
835 @*/
836 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
837 {
838   PetscSF         fieldSF;
839   PetscInt       *newValues, *remoteOffsets, fieldSize;
840   const PetscInt *originalValues;
841   PetscErrorCode  ierr;
842 
843   PetscFunctionBegin;
844   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
845   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
846 
847   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
848   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
849 
850   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
851   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
852   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
853   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
854   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
855   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
856   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
857   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
858   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 /*@
863   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
864 
865   Collective on DM
866 
867   Input Parameters:
868 + dm - The DMPlex object
869 . pointSF - The PetscSF describing the communication pattern
870 . originalSection - The PetscSection for existing data layout
871 . datatype - The type of data
872 - originalData - The existing data
873 
874   Output Parameters:
875 + newSection - The PetscSection describing the new data layout
876 - newData - The new data
877 
878   Level: developer
879 
880 .seealso: DMPlexDistribute(), DMPlexDistributeField()
881 @*/
882 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
883 {
884   PetscSF        fieldSF;
885   PetscInt      *remoteOffsets, fieldSize;
886   PetscMPIInt    dataSize;
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
891   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
892 
893   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
894   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
895   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
896 
897   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
898   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
899   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
900   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
901   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
902   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
903   PetscFunctionReturn(0);
904 }
905 
/* Communicate the cone section, cone points, and cone orientations from dm to dmParallel
   along migrationSF, then symmetrize and stratify the result.
   'original' (may be NULL) maps source-local point numbers to global numbers;
   'renumbering' maps global numbers to the new local numbering on dmParallel. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Cache the maximum cone size on the parallel mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate source cone points to global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    /* Without a mapping the source cones are already in the global numbering */
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Map the received global cone points into the new local numbering in place;
     points not present in the mapping are masked to -1 */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    /* A negative cone point means a point was referenced that did not migrate here */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SSELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Cone orientations are per-cone-entry data and need no renumbering, so send them as-is */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Copy the adjacency configuration over to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1001 
1002 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1003 {
1004   MPI_Comm         comm;
1005   PetscSection     originalCoordSection, newCoordSection;
1006   Vec              originalCoordinates, newCoordinates;
1007   PetscInt         bs;
1008   PetscBool        isper;
1009   const char      *name;
1010   const PetscReal *maxCell, *L;
1011   const DMBoundaryType *bd;
1012   PetscErrorCode   ierr;
1013 
1014   PetscFunctionBegin;
1015   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1016   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1017 
1018   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1019   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1020   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1021   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1022   if (originalCoordinates) {
1023     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1024     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1025     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1026 
1027     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1028     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1029     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1030     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1031     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1032   }
1033   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1034   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1035   PetscFunctionReturn(0);
1036 }
1037 
/* Here we are assuming that process 0 always has everything: every label present on
   process 0 is distributed, and ranks without labels still take part in the collectives. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  /* A state mismatch means the user modified depth since the Plex last computed it */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Discard the depth label computed on dmParallel so the transmitted one replaces it */
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  /* NOTE(review): reverse iteration presumably preserves label order on the target
     (DMAddLabel appears to prepend) -- confirm against DMAddLabel */
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 decides; ranks without labels receive isDepth here */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1110 
/* Transfer the hybrid point bounds (hybridPointMax per stratum) from dm to dmParallel:
   flag every hybrid point on the source, broadcast the flags along migrationSF, and record
   the first flagged point of each stratum on the target as its new hybridPointMax.
   Relies on hybrid points being numbered after regular points within each stratum.
   NOTE(review): the renumbering argument is currently unused here. */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    /* For uninterpolated meshes (depth == 1) the cell bound is stored at index dim */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      /* Every point at or beyond hybridMax in this stratum is hybrid */
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    /* The first hybrid point encountered marks the boundary for this stratum */
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1157 
/* Distribute the tree structure (parent points and child IDs) from dm to dmParallel,
   translating parent point numbers through the given renumberings, analogous to the
   cone distribution in DMPlexDistributeCones(). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    /* Push the parent section over the migration SF to get the new layout */
    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    if (original) {
      /* Translate source parent points to global numbering before sending */
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    }
    else {
      /* Without a mapping the parents are already in the global numbering */
      globParents = origParents;
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    /* Child IDs need no renumbering, so send them as-is */
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    /* Map received global parents into the new local numbering; unmapped points become -1 */
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
#endif
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1236 
/* Build the point SF of the redistributed mesh: points received by several processes hold
   an ownership vote (highest rank wins, via MPI_MAXLOC over (rank,index) pairs), and every
   non-owning copy becomes a ghost leaf pointing at the winner. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Initialize all root claims as empty */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve the vote on the roots, then tell every leaf who won */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points owned elsewhere become ghost leaves of the point SF */
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Copy the adjacency configuration over to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1313 
1314 
1315 
1316 /*@
1317   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1318 
1319   Input Parameters:
1320 + dm - The DMPlex object
1321 - flg - Balance the partition?
1322 
1323   Level: intermediate
1324 
1325 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1326 @*/
1327 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1328 {
1329   DM_Plex *mesh = (DM_Plex *)dm->data;
1330 
1331   PetscFunctionBegin;
1332   mesh->partitionBalance = flg;
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 /*@
1337   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1338 
1339   Input Parameter:
1340 + dm - The DMPlex object
1341 
1342   Output Parameter:
1343 + flg - Balance the partition?
1344 
1345   Level: intermediate
1346 
1347 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1348 @*/
1349 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1350 {
1351   DM_Plex *mesh = (DM_Plex *)dm->data;
1352 
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1355   PetscValidIntPointer(flg, 2);
1356   *flg = mesh->partitionBalance;
1357   PetscFunctionReturn(0);
1358 }
1359 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
1375 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1376 {
1377   PetscMPIInt        rank, size;
1378   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1379   PetscInt          *pointLocal;
1380   const PetscInt    *leaves;
1381   const PetscSFNode *roots;
1382   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1383   Vec                shifts;
1384   const PetscInt     numShifts = 13759;
1385   const PetscScalar *shift = NULL;
1386   const PetscBool    shiftDebug = PETSC_FALSE;
1387   PetscBool          balance;
1388   PetscErrorCode     ierr;
1389 
1390   PetscFunctionBegin;
1391   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1392   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1393   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1394 
1395   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1396   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1397   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1398   if (ownership) {
1399     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1400     if (balance) {
1401       PetscRandom r;
1402 
1403       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1404       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1405       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1406       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1407       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1408       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1409       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1410       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1411     }
1412 
1413     /* Point ownership vote: Process with highest rank owns shared points */
1414     for (p = 0; p < nleaves; ++p) {
1415       if (shiftDebug) {
1416         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1417       }
1418       /* Either put in a bid or we know we own it */
1419       leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1420       leafNodes[p].index = p;
1421     }
1422     for (p = 0; p < nroots; p++) {
1423       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1424       rootNodes[p].rank  = -3;
1425       rootNodes[p].index = -3;
1426     }
1427     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1428     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1429     if (balance) {
1430       /* We've voted, now we need to get the rank.  When we're balancing the partition, the "rank" in rootNotes is not
1431        * the rank but rather (rank + random)%size.  So we do another reduction, voting the same way, but sending the
1432        * rank instead of the index. */
1433       PetscSFNode *rootRanks = NULL;
1434       ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
1435       for (p = 0; p < nroots; p++) {
1436         rootRanks[p].rank = -3;
1437         rootRanks[p].index = -3;
1438       }
1439       for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1440       ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1441       ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1442       for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1443       ierr = PetscFree(rootRanks);CHKERRQ(ierr);
1444     }
1445   } else {
1446     for (p = 0; p < nroots; p++) {
1447       rootNodes[p].index = -1;
1448       rootNodes[p].rank = rank;
1449     };
1450     for (p = 0; p < nleaves; p++) {
1451       /* Write new local id into old location */
1452       if (roots[p].rank == rank) {
1453         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1454       }
1455     }
1456   }
1457   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1458   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1459 
1460   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1461     if (leafNodes[p].rank != rank) npointLeaves++;
1462   }
1463   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1464   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1465   for (idx = 0, p = 0; p < nleaves; p++) {
1466     if (leafNodes[p].rank != rank) {
1467       pointLocal[idx] = p;
1468       pointRemote[idx] = leafNodes[p];
1469       idx++;
1470     }
1471   }
1472   if (shift) {
1473     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1474     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1475   }
1476   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1477   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1478   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1479   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1480   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1481   PetscFunctionReturn(0);
1482 }
1483 
1484 /*@C
1485   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1486 
1487   Collective on DM and PetscSF
1488 
1489   Input Parameter:
1490 + dm       - The source DMPlex object
1491 . sf       - The star forest communication context describing the migration pattern
1492 
1493   Output Parameter:
1494 - targetDM - The target DMPlex object
1495 
1496   Level: intermediate
1497 
1498 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1499 @*/
1500 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1501 {
1502   MPI_Comm               comm;
1503   PetscInt               dim, cdim, nroots;
1504   PetscSF                sfPoint;
1505   ISLocalToGlobalMapping ltogMigration;
1506   ISLocalToGlobalMapping ltogOriginal = NULL;
1507   PetscBool              flg;
1508   PetscErrorCode         ierr;
1509 
1510   PetscFunctionBegin;
1511   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1512   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1513   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1514   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1515   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1516   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1517   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1518 
1519   /* Check for a one-to-all distribution pattern */
1520   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1521   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1522   if (nroots >= 0) {
1523     IS        isOriginal;
1524     PetscInt  n, size, nleaves;
1525     PetscInt  *numbering_orig, *numbering_new;
1526 
1527     /* Get the original point numbering */
1528     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1529     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1530     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1531     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1532     /* Convert to positive global numbers */
1533     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1534     /* Derive the new local-to-global mapping from the old one */
1535     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1536     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1537     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1538     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1539     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1540     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1541     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1542   } else {
1543     /* One-to-all distribution pattern: We can derive LToG from SF */
1544     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1545   }
1546   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1547   if (flg) {
1548     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1549     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1550   }
1551   /* Migrate DM data to target DM */
1552   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1553   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1554   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1555   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1556   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1557   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1558   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1559   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1560   PetscFunctionReturn(0);
1561 }
1562 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Serial run: nothing to distribute, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel: label value = destination process rank */
    PetscInt proc, pStart, pEnd, npoints, poffset;
    const PetscInt *points;
    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
      for (p = poffset; p < poffset+npoints; p++) {
        ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  /* Extend the cell partition to the closure of each cell */
  ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  /* Stratify the SF in case we are migrating an already parallel plex */
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves;
    PetscSFNode       *newRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;
    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: migration SF is handed to the caller if requested, otherwise destroyed */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1725 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on DM

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Serial run: nothing to overlap, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF (ownership is already determined, so no vote) */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1810 
1811 /*@C
1812   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1813   root process of the original's communicator.
1814 
1815   Collective on DM
1816 
1817   Input Parameters:
1818 . dm - the original DMPlex object
1819 
1820   Output Parameters:
1821 + sf - the PetscSF used for point distribution (optional)
1822 - gatherMesh - the gathered DM object, or NULL
1823 
1824   Level: intermediate
1825 
1826 .keywords: mesh
1827 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1828 @*/
1829 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1830 {
1831   MPI_Comm       comm;
1832   PetscMPIInt    size;
1833   PetscPartitioner oldPart, gatherPart;
1834   PetscErrorCode ierr;
1835 
1836   PetscFunctionBegin;
1837   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1838   PetscValidPointer(gatherMesh,2);
1839   *gatherMesh = NULL;
1840   if (sf) *sf = NULL;
1841   comm = PetscObjectComm((PetscObject)dm);
1842   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1843   if (size == 1) PetscFunctionReturn(0);
1844   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1845   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1846   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1847   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1848   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1849   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1850 
1851   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1852   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1853   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1854   PetscFunctionReturn(0);
1855 }
1856 
1857 /*@C
1858   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1859 
1860   Collective on DM
1861 
1862   Input Parameters:
1863 . dm - the original DMPlex object
1864 
1865   Output Parameters:
1866 + sf - the PetscSF used for point distribution (optional)
1867 - redundantMesh - the redundant DM object, or NULL
1868 
1869   Level: intermediate
1870 
1871 .keywords: mesh
1872 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1873 @*/
1874 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1875 {
1876   MPI_Comm       comm;
1877   PetscMPIInt    size, rank;
1878   PetscInt       pStart, pEnd, p;
1879   PetscInt       numPoints = -1;
1880   PetscSF        migrationSF, sfPoint, gatherSF;
1881   DM             gatherDM, dmCoord;
1882   PetscSFNode    *points;
1883   PetscErrorCode ierr;
1884 
1885   PetscFunctionBegin;
1886   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1887   PetscValidPointer(redundantMesh,2);
1888   *redundantMesh = NULL;
1889   comm = PetscObjectComm((PetscObject)dm);
1890   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1891   if (size == 1) {
1892     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1893     *redundantMesh = dm;
1894     if (sf) *sf = NULL;
1895     PetscFunctionReturn(0);
1896   }
1897   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1898   if (!gatherDM) PetscFunctionReturn(0);
1899   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1900   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1901   numPoints = pEnd - pStart;
1902   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1903   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1904   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1905   for (p = 0; p < numPoints; p++) {
1906     points[p].index = p;
1907     points[p].rank  = 0;
1908   }
1909   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1910   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1911   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1912   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1913   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1914   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1915   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1916   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1917   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1918   if (sf) {
1919     PetscSF tsf;
1920 
1921     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1922     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1923     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1924   }
1925   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1926   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1927   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1928   PetscFunctionReturn(0);
1929 }
1930