xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision a685ae26bbf9fb229cf02e15e73d16b8a9cd69e6)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 - user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* Dispatch adjacency computation for point p, allocating the output array if needed
   and optionally replacing anchored (constrained) points by their anchors.

   Input:
     useCone/useTransitiveClosure - select the adjacency variant (user callback wins if set)
     useAnchors                   - substitute anchor points for constrained points
   In/out:
     adjSize - on entry the capacity of *adj (or negative to mean "use the cached upper bound");
               on exit the number of adjacent points
     adj     - if *adj is NULL, an array is allocated here (caller must PetscFree it) */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): static cache of the last computed upper bound.  Not thread-safe, and if
     the caller passes a pre-allocated *adj with *adjSize < 0, a stale asiz (possibly from a
     different DM) is used -- confirm callers always pair *adj == NULL with *adjSize < 0. */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      /* Each constrained point may expand into up to maxAnchors points */
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    /* Geometric-series bounds on closure/star sizes: sum_{k=0}^{depth} maxC^k (resp. maxS^k) */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    /* Worst case over both traversal orders (star of closure vs closure of star) */
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    /* The adjacency can never exceed the total number of points in the chart */
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: user callback overrides everything; otherwise pick the variant by flags */
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  /* Post-process: replace each constrained point in the adjacency by its anchor points */
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      /* NOTE(review): this local p shadows the function parameter p */
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove orig[i] by shifting the tail left one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor of p, deduplicating against the current list; the comma
           expression appends anchors[aOff+s] when the scan reaches q == numAdj */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        /* orig[i] is unconstrained: keep it and advance */
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
267 
268 /*@
269   DMPlexGetAdjacency - Return all points adjacent to the given point
270 
271   Input Parameters:
272 + dm - The DM object
273 . p  - The point
274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276 
277   Output Parameters:
278 + adjSize - The number of adjacent points
279 - adj - The adjacent points
280 
281   Level: advanced
282 
283   Notes:
284     The user must PetscFree the adj array if it was not passed in.
285 
286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287 @*/
288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289 {
290   PetscBool      useCone, useClosure, useAnchors;
291   PetscErrorCode ierr;
292 
293   PetscFunctionBeginHot;
294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295   PetscValidPointer(adjSize,3);
296   PetscValidPointer(adj,4);
297   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300   PetscFunctionReturn(0);
301 }
302 
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving, for each root point, the number of ranks reaching it
. rootRanks       - IS listing those ranks, laid out by rootRankSection
. leafRankSection - Section giving, for each leaf point, the number of ranks sharing it
- leafRanks       - IS listing those ranks, laid out by leafRankSection

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;   /* one bit per rank: is it a neighbor of this process? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  /* NOTE(review): PetscBTSet/PetscBTClear return values are ignored here, unlike above */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges; a process is never its own neighbor */
  PetscBTClear(neighbors, rank);
  for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* Build one SF edge per neighbor: local "point" proc maps to (rank) on process proc */
  for(proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  /* ranksNew ownership goes to the IS if requested, otherwise free it here */
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
392 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

  Note:
  rootSection and leafSection are expected to be created (but not set up) by the caller;
  rootrank and leafrank are created here and must be destroyed by the caller.

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to its root: every leaf contributes its own rank */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks back to the leaves, filling leafSection/leafrank */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
447 
448 /*@C
449   DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.
450 
451   Collective on DM
452 
453   Input Parameters:
454 + dm          - The DM
455 . levels      - Number of overlap levels
456 . rootSection - The number of leaves for a given root point
457 . rootrank    - The rank of each edge into the root point
458 . leafSection - The number of processes sharing a given leaf point
459 - leafrank    - The rank of each process sharing a leaf point
460 
461   Output Parameters:
462 + ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
463 
464   Level: developer
465 
466 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467 @*/
468 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469 {
470   MPI_Comm           comm;
471   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472   PetscSF            sfPoint, sfProc;
473   const PetscSFNode *remote;
474   const PetscInt    *local;
475   const PetscInt    *nrank, *rrank;
476   PetscInt          *adj = NULL;
477   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478   PetscMPIInt        rank, size;
479   PetscBool          useCone, useClosure, flg;
480   PetscErrorCode     ierr;
481 
482   PetscFunctionBegin;
483   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491   /* Handle leaves: shared with the root point */
492   for (l = 0; l < nleaves; ++l) {
493     PetscInt adjSize = PETSC_DETERMINE, a;
494 
495     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497   }
498   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500   /* Handle roots */
501   for (p = pStart; p < pEnd; ++p) {
502     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503 
504     if ((p >= sStart) && (p < sEnd)) {
505       /* Some leaves share a root with other leaves on different processes */
506       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
507       if (neighbors) {
508         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
509         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
510         for (n = 0; n < neighbors; ++n) {
511           const PetscInt remoteRank = nrank[noff+n];
512 
513           if (remoteRank == rank) continue;
514           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
515         }
516       }
517     }
518     /* Roots are shared with leaves */
519     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
520     if (!neighbors) continue;
521     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
522     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523     for (n = 0; n < neighbors; ++n) {
524       const PetscInt remoteRank = rrank[noff+n];
525 
526       if (remoteRank == rank) continue;
527       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528     }
529   }
530   ierr = PetscFree(adj);CHKERRQ(ierr);
531   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
532   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
533   /* Add additional overlap levels */
534   for (l = 1; l < levels; l++) {
535     /* Propagate point donations over SF to capture remote connections */
536     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
537     /* Add next level of point donations to the label */
538     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
539   }
540   /* We require the closure in the overlap */
541   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
542   if (useCone || !useClosure) {
543     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
544   }
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
548   }
549   /* Make global process SF and invert sender to receiver label */
550   {
551     /* Build a global process SF */
552     PetscSFNode *remoteProc;
553     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
554     for (p = 0; p < size; ++p) {
555       remoteProc[p].rank  = p;
556       remoteProc[p].index = rank;
557     }
558     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
559     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
560     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
561   }
562   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
563   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
564   /* Add owned points, except for shared local points */
565   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
566   for (l = 0; l < nleaves; ++l) {
567     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
568     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
569   }
570   /* Clean up */
571   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
572   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
573   PetscFunctionReturn(0);
574 }
575 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag every local point with its depth stratum, then learn the strata of incoming points */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Strata are interleaved in the Plex ordering (cells, vertices, faces, edges);
     accumulate the shifts accordingly.  NOTE(review): assumes the mesh is interpolated
     so that depth == dim ordering applies -- confirm with callers. */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the running insertion cursor for received points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves (shifted into the new numbering) */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Shared points keep pointing at their existing remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at each stratum's cursor */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership passes to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
686 
687 /*@
688   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
689 
690   Input Parameter:
691 + dm          - The DM
692 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
693 
694   Output Parameter:
695 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
696 
697   Level: developer
698 
699 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
700 @*/
701 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
702 {
703   MPI_Comm           comm;
704   PetscMPIInt        rank, size;
705   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
706   PetscInt          *pointDepths, *remoteDepths, *ilocal;
707   PetscInt          *depthRecv, *depthShift, *depthIdx;
708   PetscInt           hybEnd[4];
709   const PetscSFNode *iremote;
710   PetscErrorCode     ierr;
711 
712   PetscFunctionBegin;
713   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
714   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
715   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
716   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
717   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
718   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
719   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
720 
721   /* Before building the migration SF we need to know the new stratum offsets */
722   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
723   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
724   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
725   for (d = 0; d < depth+1; ++d) {
726     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
727     for (p = pStart; p < pEnd; ++p) {
728       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
729         pointDepths[p] = 2 * d;
730       } else {
731         pointDepths[p] = 2 * d + 1;
732       }
733     }
734   }
735   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
736   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
737   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
738   /* Count recevied points in each stratum and compute the internal strata shift */
739   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
740   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
741   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
742   depthShift[2*depth+1] = 0;
743   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
744   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
745   depthShift[0] += depthRecv[1];
746   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
747   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
748   for (d = 2 * depth-1; d > 2; --d) {
749     PetscInt e;
750 
751     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
752   }
753   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
754   /* Derive a new local permutation based on stratified indices */
755   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
756   for (p = 0; p < nleaves; ++p) {
757     const PetscInt dep = remoteDepths[p];
758 
759     ilocal[p] = depthShift[dep] + depthIdx[dep];
760     depthIdx[dep]++;
761   }
762   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
763   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
764   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
765   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
766   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
767   PetscFunctionReturn(0);
768 }
769 
770 /*@
771   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
772 
773   Collective on DM
774 
775   Input Parameters:
776 + dm - The DMPlex object
777 . pointSF - The PetscSF describing the communication pattern
778 . originalSection - The PetscSection for existing data layout
779 - originalVec - The existing data
780 
781   Output Parameters:
+ newSection - The PetscSection describing the new data layout
783 - newVec - The new data
784 
785   Level: developer
786 
787 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
788 @*/
789 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
790 {
791   PetscSF        fieldSF;
792   PetscInt      *remoteOffsets, fieldSize;
793   PetscScalar   *originalValues, *newValues;
794   PetscErrorCode ierr;
795 
796   PetscFunctionBegin;
797   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
798   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
799 
800   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
801   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
802   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
803 
804   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
805   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
806   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
807   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
808   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
809   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
810   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
811   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
812   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
813   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 /*@
818   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
819 
820   Collective on DM
821 
822   Input Parameters:
823 + dm - The DMPlex object
824 . pointSF - The PetscSF describing the communication pattern
825 . originalSection - The PetscSection for existing data layout
826 - originalIS - The existing data
827 
828   Output Parameters:
+ newSection - The PetscSection describing the new data layout
830 - newIS - The new data
831 
832   Level: developer
833 
834 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
835 @*/
836 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
837 {
838   PetscSF         fieldSF;
839   PetscInt       *newValues, *remoteOffsets, fieldSize;
840   const PetscInt *originalValues;
841   PetscErrorCode  ierr;
842 
843   PetscFunctionBegin;
844   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
845   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
846 
847   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
848   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
849 
850   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
851   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
852   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
853   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
854   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
855   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
856   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
857   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
858   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
859   PetscFunctionReturn(0);
860 }
861 
862 /*@
863   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
864 
865   Collective on DM
866 
867   Input Parameters:
868 + dm - The DMPlex object
869 . pointSF - The PetscSF describing the communication pattern
870 . originalSection - The PetscSection for existing data layout
871 . datatype - The type of data
872 - originalData - The existing data
873 
874   Output Parameters:
875 + newSection - The PetscSection describing the new data layout
876 - newData - The new data
877 
878   Level: developer
879 
880 .seealso: DMPlexDistribute(), DMPlexDistributeField()
881 @*/
882 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
883 {
884   PetscSF        fieldSF;
885   PetscInt      *remoteOffsets, fieldSize;
886   PetscMPIInt    dataSize;
887   PetscErrorCode ierr;
888 
889   PetscFunctionBegin;
890   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
891   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
892 
893   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
894   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
895   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
896 
897   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
898   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
899   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
900   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
901   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
902   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
903   PetscFunctionReturn(0);
904 }
905 
/* Distribute the cones and cone orientations of dm onto dmParallel through migrationSF,
   translating cone points via the 'original' local-to-global mapping (may be NULL if cones
   are already global) and the 'renumbering' global-to-local mapping of the migrated points.
   Finishes by symmetrizing (building supports) and stratifying the parallel mesh, and by
   copying the basic-adjacency and anchor-adjacency settings over to dmParallel. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  /* DMSetUp allocates cone storage on dmParallel based on the distributed cone section */
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Update the cached maximum cone size for the receiving mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate local cone points to global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    /* Cones are already in the global numbering; send them as-is */
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Convert received global cone points to the new local numbering; unresolved points become negative */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    /* Every cone point must have resolved to a local point, otherwise the overlap SF was incomplete */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations need no renumbering, so reuse the same SF to move them */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Carry adjacency configuration over to the distributed mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1001 
1002 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1003 {
1004   MPI_Comm         comm;
1005   PetscSection     originalCoordSection, newCoordSection;
1006   Vec              originalCoordinates, newCoordinates;
1007   PetscInt         bs;
1008   PetscBool        isper;
1009   const char      *name;
1010   const PetscReal *maxCell, *L;
1011   const DMBoundaryType *bd;
1012   PetscErrorCode   ierr;
1013 
1014   PetscFunctionBegin;
1015   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1016   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1017 
1018   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1019   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1020   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1021   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1022   if (originalCoordinates) {
1023     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1024     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1025     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1026 
1027     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1028     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1029     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1030     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1031     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1032   }
1033   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1034   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1035   PetscFunctionReturn(0);
1036 }
1037 
/* Distribute every label of dm onto dmParallel through migrationSF.
   Here we are assuming that process 0 always has everything: the label count and
   the per-label "is this the depth label" decision are broadcast from rank 0. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  /* A state mismatch with the cached depthState means the user touched the depth label */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the depth label DMPlexStratify created on dmParallel so the distributed one replaces it */
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  /* Iterate in reverse so labels end up on dmParallel in the original order */
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* isDepth is decided on rank 0 (assumed to have labels) and broadcast to ranks without labels */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      /* NOTE(review): %d used with PetscInt arguments; %D would be safe for 64-bit indices */
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1110 
/* Transfer the hybrid-point bounds (hybridPointMax) from dm to dmParallel.
   Each source point is flagged as hybrid or not, the flags are migrated through
   migrationSF, and the first hybrid point found in each stratum of dmParallel
   becomes its new bound (relies on hybrid points sorting after regular ones,
   as arranged by DMPlexStratifyMigrationSF).
   NOTE(review): the renumbering argument is unused here -- presumably kept for
   signature symmetry with the other setup routines; confirm before removing. */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;  /* per-point hybrid flags on source / target chart */
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    /* For cell-vertex meshes (depth 1), the cell stratum d==1 stores its bound at index dim */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      /* Every point at or beyond the bound within this stratum is hybrid */
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  /* Migrate the flags to the target chart */
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    /* The first hybrid point in the stratum is the new bound */
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1157 
1158 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1159 {
1160   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1161   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1162   MPI_Comm        comm;
1163   DM              refTree;
1164   PetscSection    origParentSection, newParentSection;
1165   PetscInt        *origParents, *origChildIDs;
1166   PetscBool       flg;
1167   PetscErrorCode  ierr;
1168 
1169   PetscFunctionBegin;
1170   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1171   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1172   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1173 
1174   /* Set up tree */
1175   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1176   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1177   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1178   if (origParentSection) {
1179     PetscInt        pStart, pEnd;
1180     PetscInt        *newParents, *newChildIDs, *globParents;
1181     PetscInt        *remoteOffsetsParents, newParentSize;
1182     PetscSF         parentSF;
1183 
1184     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1185     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1186     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1187     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1188     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1189     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1190     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1191     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1192     if (original) {
1193       PetscInt numParents;
1194 
1195       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1196       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1197       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1198     }
1199     else {
1200       globParents = origParents;
1201     }
1202     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1203     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1204     if (original) {
1205       ierr = PetscFree(globParents);CHKERRQ(ierr);
1206     }
1207     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1208     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1209     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1210 #if defined(PETSC_USE_DEBUG)
1211     {
1212       PetscInt  p;
1213       PetscBool valid = PETSC_TRUE;
1214       for (p = 0; p < newParentSize; ++p) {
1215         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1216       }
1217       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1218     }
1219 #endif
1220     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1221     if (flg) {
1222       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1223       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1224       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1225       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1226       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1227     }
1228     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1229     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1230     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1231     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1232   }
1233   pmesh->useAnchors = mesh->useAnchors;
1234   PetscFunctionReturn(0);
1235 }
1236 
/* Build the point SF (shared-point overlap) of dmParallel from the migration SF.
   Ownership of shared points is decided by a MAXLOC vote over the migration SF:
   every receiver bids its own rank for unclaimed points; after the reduction,
   the winning rank owns the point and all other receivers record it as a ghost. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Start with all roots unclaimed: (-1,-1) */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* The highest bidding rank wins ownership; broadcast the result back to all receivers */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Ghosts are the local points whose owner is another rank */
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    /* The point SF takes ownership of ghostPoints and remotePoints */
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Carry adjacency configuration over to the distributed mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1313 
1314 /*@
1315   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1316 
1317   Input Parameters:
1318 + dm - The DMPlex object
1319 - flg - Balance the partition?
1320 
1321   Level: intermediate
1322 
1323 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1324 @*/
1325 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1326 {
1327   DM_Plex *mesh = (DM_Plex *)dm->data;
1328 
1329   PetscFunctionBegin;
1330   mesh->partitionBalance = flg;
1331   PetscFunctionReturn(0);
1332 }
1333 
1334 /*@
1335   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1336 
1337   Input Parameter:
. dm - The DMPlex object
1339 
1340   Output Parameter:
. flg - Balance the partition?
1342 
1343   Level: intermediate
1344 
1345 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1346 @*/
1347 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1348 {
1349   DM_Plex *mesh = (DM_Plex *)dm->data;
1350 
1351   PetscFunctionBegin;
1352   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1353   PetscValidIntPointer(flg, 2);
1354   *flg = mesh->partitionBalance;
1355   PetscFunctionReturn(0);
1356 }
1357 
1358 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1368 
1369   Level: developer
1370 
1371 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1372 @*/
1373 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1374 {
1375   PetscMPIInt        rank, size;
1376   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1377   PetscInt          *pointLocal;
1378   const PetscInt    *leaves;
1379   const PetscSFNode *roots;
1380   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1381   Vec                shifts;
1382   const PetscInt     numShifts = 13759;
1383   const PetscScalar *shift = NULL;
1384   const PetscBool    shiftDebug = PETSC_FALSE;
1385   PetscBool          balance;
1386   PetscErrorCode     ierr;
1387 
1388   PetscFunctionBegin;
1389   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1390   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1391   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1392 
1393   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1394   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1395   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1396   if (ownership) {
1397     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1398     if (balance) {
1399       PetscRandom r;
1400 
1401       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1402       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1403       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1404       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1405       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1406       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1407       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1408       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1409     }
1410 
1411     /* Point ownership vote: Process with highest rank owns shared points */
1412     for (p = 0; p < nleaves; ++p) {
1413       if (shiftDebug) {
1414         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1415       }
1416       /* Either put in a bid or we know we own it */
1417       leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1418       leafNodes[p].index = p;
1419     }
1420     for (p = 0; p < nroots; p++) {
1421       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1422       rootNodes[p].rank  = -3;
1423       rootNodes[p].index = -3;
1424     }
1425     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1426     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1427     if (balance) {
1428       /* We've voted, now we need to get the rank.  When we're balancing the partition, the "rank" in rootNotes is not
1429        * the rank but rather (rank + random)%size.  So we do another reduction, voting the same way, but sending the
1430        * rank instead of the index. */
1431       PetscSFNode *rootRanks = NULL;
1432       ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
1433       for (p = 0; p < nroots; p++) {
1434         rootRanks[p].rank = -3;
1435         rootRanks[p].index = -3;
1436       }
1437       for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1438       ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1439       ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1440       for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1441       ierr = PetscFree(rootRanks);CHKERRQ(ierr);
1442     }
1443   } else {
1444     for (p = 0; p < nroots; p++) {
1445       rootNodes[p].index = -1;
1446       rootNodes[p].rank = rank;
1447     };
1448     for (p = 0; p < nleaves; p++) {
1449       /* Write new local id into old location */
1450       if (roots[p].rank == rank) {
1451         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1452       }
1453     }
1454   }
1455   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1456   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1457 
1458   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1459     if (leafNodes[p].rank != rank) npointLeaves++;
1460   }
1461   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1462   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1463   for (idx = 0, p = 0; p < nleaves; p++) {
1464     if (leafNodes[p].rank != rank) {
1465       pointLocal[idx] = p;
1466       pointRemote[idx] = leafNodes[p];
1467       idx++;
1468     }
1469   }
1470   if (shift) {
1471     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1472     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1473   }
1474   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1475   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1476   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1477   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1478   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1479   PetscFunctionReturn(0);
1480 }
1481 
1482 /*@C
1483   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1484 
1485   Collective on DM and PetscSF
1486 
1487   Input Parameter:
1488 + dm       - The source DMPlex object
1489 . sf       - The star forest communication context describing the migration pattern
1490 
1491   Output Parameter:
1492 - targetDM - The target DMPlex object
1493 
1494   Level: intermediate
1495 
1496 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1497 @*/
1498 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1499 {
1500   MPI_Comm               comm;
1501   PetscInt               dim, cdim, nroots;
1502   PetscSF                sfPoint;
1503   ISLocalToGlobalMapping ltogMigration;
1504   ISLocalToGlobalMapping ltogOriginal = NULL;
1505   PetscBool              flg;
1506   PetscErrorCode         ierr;
1507 
1508   PetscFunctionBegin;
1509   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1510   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1511   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1512   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1513   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1514   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1515   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1516 
1517   /* Check for a one-to-all distribution pattern */
1518   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1519   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1520   if (nroots >= 0) {
1521     IS        isOriginal;
1522     PetscInt  n, size, nleaves;
1523     PetscInt  *numbering_orig, *numbering_new;
1524 
1525     /* Get the original point numbering */
1526     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1527     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1528     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1529     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1530     /* Convert to positive global numbers */
1531     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1532     /* Derive the new local-to-global mapping from the old one */
1533     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1534     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1535     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1536     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1537     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1538     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1539     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1540   } else {
1541     /* One-to-all distribution pattern: We can derive LToG from SF */
1542     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1543   }
1544   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1545   if (flg) {
1546     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1547     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1548   }
1549   /* Migrate DM data to target DM */
1550   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1551   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1552   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1553   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1554   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1555   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1556   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1557   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1558   PetscFunctionReturn(0);
1559 }
1560 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Serial run: nothing to distribute, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel: label value is the destination process of each point */
    PetscInt proc, pStart, pEnd, npoints, poffset;
    const PetscInt *points;
    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
      for (p = poffset; p < poffset+npoints; p++) {
        ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
  {
    /* Build a global process SF: each process is a root, leaves connect all processes */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  /* Stratify the SF in case we are migrating an already parallel plex */
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves;
    PetscSFNode       *newRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;
    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: hand the migration SF to the caller if requested, otherwise destroy it */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1723 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on DM

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Serial run: no overlap to compute, outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF; no ownership vote is needed since ownership is already established */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1808 
1809 /*@C
1810   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1811   root process of the original's communicator.
1812 
1813   Collective on DM
1814 
1815   Input Parameters:
1816 . dm - the original DMPlex object
1817 
1818   Output Parameters:
1819 + sf - the PetscSF used for point distribution (optional)
1820 - gatherMesh - the gathered DM object, or NULL
1821 
1822   Level: intermediate
1823 
1824 .keywords: mesh
1825 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1826 @*/
1827 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1828 {
1829   MPI_Comm       comm;
1830   PetscMPIInt    size;
1831   PetscPartitioner oldPart, gatherPart;
1832   PetscErrorCode ierr;
1833 
1834   PetscFunctionBegin;
1835   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1836   PetscValidPointer(gatherMesh,2);
1837   *gatherMesh = NULL;
1838   if (sf) *sf = NULL;
1839   comm = PetscObjectComm((PetscObject)dm);
1840   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1841   if (size == 1) PetscFunctionReturn(0);
1842   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1843   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1844   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1845   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1846   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1847   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1848 
1849   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1850   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1851   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1852   PetscFunctionReturn(0);
1853 }
1854 
1855 /*@C
1856   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1857 
1858   Collective on DM
1859 
1860   Input Parameters:
1861 . dm - the original DMPlex object
1862 
1863   Output Parameters:
1864 + sf - the PetscSF used for point distribution (optional)
1865 - redundantMesh - the redundant DM object, or NULL
1866 
1867   Level: intermediate
1868 
1869 .keywords: mesh
1870 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1871 @*/
1872 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1873 {
1874   MPI_Comm       comm;
1875   PetscMPIInt    size, rank;
1876   PetscInt       pStart, pEnd, p;
1877   PetscInt       numPoints = -1;
1878   PetscSF        migrationSF, sfPoint, gatherSF;
1879   DM             gatherDM, dmCoord;
1880   PetscSFNode    *points;
1881   PetscErrorCode ierr;
1882 
1883   PetscFunctionBegin;
1884   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1885   PetscValidPointer(redundantMesh,2);
1886   *redundantMesh = NULL;
1887   comm = PetscObjectComm((PetscObject)dm);
1888   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1889   if (size == 1) {
1890     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1891     *redundantMesh = dm;
1892     if (sf) *sf = NULL;
1893     PetscFunctionReturn(0);
1894   }
1895   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1896   if (!gatherDM) PetscFunctionReturn(0);
1897   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1898   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1899   numPoints = pEnd - pStart;
1900   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1901   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1902   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1903   for (p = 0; p < numPoints; p++) {
1904     points[p].index = p;
1905     points[p].rank  = 0;
1906   }
1907   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1908   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1909   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1910   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1911   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1912   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1913   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1914   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1915   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1916   if (sf) {
1917     PetscSF tsf;
1918 
1919     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1920     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1921     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1922   }
1923   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1924   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1925   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1926   PetscFunctionReturn(0);
1927 }
1928