xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision ddcfb6fc814de002685263716f916f1eccaf9f7f)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 - user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
182 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183 {
184   static PetscInt asiz = 0;
185   PetscInt maxAnchors = 1;
186   PetscInt aStart = -1, aEnd = -1;
187   PetscInt maxAdjSize;
188   PetscSection aSec = NULL;
189   IS aIS = NULL;
190   const PetscInt *anchors;
191   DM_Plex *mesh = (DM_Plex *)dm->data;
192   PetscErrorCode  ierr;
193 
194   PetscFunctionBeginHot;
195   if (useAnchors) {
196     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197     if (aSec) {
198       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199       maxAnchors = PetscMax(1,maxAnchors);
200       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202     }
203   }
204   if (!*adj) {
205     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206 
207     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
210     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
211     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
212     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
213     asiz *= maxAnchors;
214     asiz  = PetscMin(asiz,pEnd-pStart);
215     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
216   }
217   if (*adjSize < 0) *adjSize = asiz;
218   maxAdjSize = *adjSize;
219   if (mesh->useradjacency) {
220     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
221   } else if (useTransitiveClosure) {
222     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
223   } else if (useCone) {
224     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
225   } else {
226     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227   }
228   if (useAnchors && aSec) {
229     PetscInt origSize = *adjSize;
230     PetscInt numAdj = origSize;
231     PetscInt i = 0, j;
232     PetscInt *orig = *adj;
233 
234     while (i < origSize) {
235       PetscInt p = orig[i];
236       PetscInt aDof = 0;
237 
238       if (p >= aStart && p < aEnd) {
239         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
240       }
241       if (aDof) {
242         PetscInt aOff;
243         PetscInt s, q;
244 
245         for (j = i + 1; j < numAdj; j++) {
246           orig[j - 1] = orig[j];
247         }
248         origSize--;
249         numAdj--;
250         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
251         for (s = 0; s < aDof; ++s) {
252           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
253             if (anchors[aOff+s] == orig[q]) break;
254           }
255           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
256         }
257       }
258       else {
259         i++;
260       }
261     }
262     *adjSize = numAdj;
263     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
264   }
265   PetscFunctionReturn(0);
266 }
267 
268 /*@
269   DMPlexGetAdjacency - Return all points adjacent to the given point
270 
271   Input Parameters:
272 + dm - The DM object
273 . p  - The point
274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276 
277   Output Parameters:
278 + adjSize - The number of adjacent points
279 - adj - The adjacent points
280 
281   Level: advanced
282 
283   Notes:
284     The user must PetscFree the adj array if it was not passed in.
285 
286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287 @*/
288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289 {
290   PetscBool      useCone, useClosure, useAnchors;
291   PetscErrorCode ierr;
292 
293   PetscFunctionBeginHot;
294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295   PetscValidPointer(adjSize,3);
296   PetscValidPointer(adj,4);
297   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300   PetscFunctionReturn(0);
301 }
302 
303 /*@
304   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305 
306   Collective on DM
307 
308   Input Parameters:
309 + dm      - The DM
310 - sfPoint - The PetscSF which encodes point connectivity
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330   PetscErrorCode     ierr;
331 
332   PetscFunctionBegin;
333   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335   if (processRanks) {PetscValidPointer(processRanks, 3);}
336   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
337   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342   /* Compute root-to-leaf process connectivity */
343   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345   for (p = pStart; p < pEnd; ++p) {
346     PetscInt ndof, noff, n;
347 
348     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351   }
352   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353   /* Compute leaf-to-neighbor process connectivity */
354   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356   for (p = pStart; p < pEnd; ++p) {
357     PetscInt ndof, noff, n;
358 
359     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362   }
363   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364   /* Compute leaf-to-root process connectivity */
365   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366   /* Calculate edges */
367   PetscBTClear(neighbors, rank);
368   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372   for(proc = 0, n = 0; proc < size; ++proc) {
373     if (PetscBTLookup(neighbors, proc)) {
374       ranksNew[n]              = proc;
375       localPointsNew[n]        = proc;
376       remotePointsNew[n].index = rank;
377       remotePointsNew[n].rank  = proc;
378       ++n;
379     }
380   }
381   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384   if (sfProcess) {
385     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389   }
390   PetscFunctionReturn(0);
391 }
392 
393 /*@
394   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
395 
396   Collective on DM
397 
398   Input Parameter:
399 . dm - The DM
400 
401   Output Parameters:
402 + rootSection - The number of leaves for a given root point
403 . rootrank    - The rank of each edge into the root point
404 . leafSection - The number of processes sharing a given leaf point
405 - leafrank    - The rank of each process sharing a leaf point
406 
407   Level: developer
408 
409 .seealso: DMPlexCreateOverlap()
410 @*/
411 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
412 {
413   MPI_Comm        comm;
414   PetscSF         sfPoint;
415   const PetscInt *rootdegree;
416   PetscInt       *myrank, *remoterank;
417   PetscInt        pStart, pEnd, p, nedges;
418   PetscMPIInt     rank;
419   PetscErrorCode  ierr;
420 
421   PetscFunctionBegin;
422   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
423   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
424   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
425   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
426   /* Compute number of leaves for each root */
427   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
428   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
429   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
430   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
431   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
432   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
433   /* Gather rank of each leaf to root */
434   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
435   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
436   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
437   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
438   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
439   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440   ierr = PetscFree(myrank);CHKERRQ(ierr);
441   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
442   /* Distribute remote ranks to leaves */
443   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
444   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
445   PetscFunctionReturn(0);
446 }
447 
448 /*@C
449   DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.
450 
451   Collective on DM
452 
453   Input Parameters:
454 + dm          - The DM
455 . levels      - Number of overlap levels
456 . rootSection - The number of leaves for a given root point
457 . rootrank    - The rank of each edge into the root point
458 . leafSection - The number of processes sharing a given leaf point
459 - leafrank    - The rank of each process sharing a leaf point
460 
461   Output Parameters:
462 + ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
463 
464   Level: developer
465 
466 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467 @*/
468 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469 {
470   MPI_Comm           comm;
471   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472   PetscSF            sfPoint;
473   const PetscSFNode *remote;
474   const PetscInt    *local;
475   const PetscInt    *nrank, *rrank;
476   PetscInt          *adj = NULL;
477   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478   PetscMPIInt        rank, size;
479   PetscBool          useCone, useClosure, flg;
480   PetscErrorCode     ierr;
481 
482   PetscFunctionBegin;
483   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491   /* Handle leaves: shared with the root point */
492   for (l = 0; l < nleaves; ++l) {
493     PetscInt adjSize = PETSC_DETERMINE, a;
494 
495     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497   }
498   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500   /* Handle roots */
501   for (p = pStart; p < pEnd; ++p) {
502     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503 
504     if ((p >= sStart) && (p < sEnd)) {
505       /* Some leaves share a root with other leaves on different processes */
506       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
507       if (neighbors) {
508         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
509         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
510         for (n = 0; n < neighbors; ++n) {
511           const PetscInt remoteRank = nrank[noff+n];
512 
513           if (remoteRank == rank) continue;
514           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
515         }
516       }
517     }
518     /* Roots are shared with leaves */
519     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
520     if (!neighbors) continue;
521     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
522     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523     for (n = 0; n < neighbors; ++n) {
524       const PetscInt remoteRank = rrank[noff+n];
525 
526       if (remoteRank == rank) continue;
527       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528     }
529   }
530   ierr = PetscFree(adj);CHKERRQ(ierr);
531   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
532   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
533   /* Add additional overlap levels */
534   for (l = 1; l < levels; l++) {
535     /* Propagate point donations over SF to capture remote connections */
536     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
537     /* Add next level of point donations to the label */
538     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
539   }
540   /* We require the closure in the overlap */
541   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
542   if (useCone || !useClosure) {
543     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
544   }
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     PetscViewer viewer;
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
549     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
550   }
551   /* Invert sender to receiver label */
552   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
553   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
554   /* Add owned points, except for shared local points */
555   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
556   for (l = 0; l < nleaves; ++l) {
557     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
558     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
559   }
560   /* Clean up */
561   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
562   PetscFunctionReturn(0);
563 }
564 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag every local point with its depth stratum, then pull the depth of each incoming overlap point */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  /* NOTE(review): the shift arithmetic below appears to place received points after
     the existing points of each stratum, matching the depth ordering used by Plex;
     the index manipulation is intricate -- confirm against DMPlexStratifyMigrationSF
     before modifying */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: existing local points plus incoming overlap points */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the running index of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership passes to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
675 
676 /*@
677   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
678 
679   Input Parameter:
680 + dm          - The DM
681 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
682 
683   Output Parameter:
684 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
685 
686   Level: developer
687 
688 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
689 @*/
690 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
691 {
692   MPI_Comm           comm;
693   PetscMPIInt        rank, size;
694   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
695   PetscInt          *pointDepths, *remoteDepths, *ilocal;
696   PetscInt          *depthRecv, *depthShift, *depthIdx;
697   PetscInt           hybEnd[4];
698   const PetscSFNode *iremote;
699   PetscErrorCode     ierr;
700 
701   PetscFunctionBegin;
702   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
703   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
704   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
705   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
706   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
707   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
708   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
709 
710   /* Before building the migration SF we need to know the new stratum offsets */
711   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
712   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
713   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
714   for (d = 0; d < depth+1; ++d) {
715     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
716     for (p = pStart; p < pEnd; ++p) {
717       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
718         pointDepths[p] = 2 * d;
719       } else {
720         pointDepths[p] = 2 * d + 1;
721       }
722     }
723   }
724   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
725   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
726   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
727   /* Count recevied points in each stratum and compute the internal strata shift */
728   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
729   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
730   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
731   depthShift[2*depth+1] = 0;
732   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
733   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
734   depthShift[0] += depthRecv[1];
735   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
736   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
737   for (d = 2 * depth-1; d > 2; --d) {
738     PetscInt e;
739 
740     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
741   }
742   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
743   /* Derive a new local permutation based on stratified indices */
744   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
745   for (p = 0; p < nleaves; ++p) {
746     const PetscInt dep = remoteDepths[p];
747 
748     ilocal[p] = depthShift[dep] + depthIdx[dep];
749     depthIdx[dep]++;
750   }
751   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
752   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
753   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
754   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
755   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
756   PetscFunctionReturn(0);
757 }
758 
759 /*@
760   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
761 
762   Collective on DM
763 
764   Input Parameters:
765 + dm - The DMPlex object
766 . pointSF - The PetscSF describing the communication pattern
767 . originalSection - The PetscSection for existing data layout
768 - originalVec - The existing data
769 
770   Output Parameters:
+ newSection - The PetscSection describing the new data layout
772 - newVec - The new data
773 
774   Level: developer
775 
776 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
777 @*/
778 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
779 {
780   PetscSF        fieldSF;
781   PetscInt      *remoteOffsets, fieldSize;
782   PetscScalar   *originalValues, *newValues;
783   PetscErrorCode ierr;
784 
785   PetscFunctionBegin;
786   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
787   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
788 
789   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
790   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
791   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
792 
793   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
794   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
795   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
796   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
797   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
798   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
799   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
800   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
801   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
802   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
803   PetscFunctionReturn(0);
804 }
805 
806 /*@
807   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
808 
809   Collective on DM
810 
811   Input Parameters:
812 + dm - The DMPlex object
813 . pointSF - The PetscSF describing the communication pattern
814 . originalSection - The PetscSection for existing data layout
815 - originalIS - The existing data
816 
817   Output Parameters:
+ newSection - The PetscSection describing the new data layout
819 - newIS - The new data
820 
821   Level: developer
822 
823 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
824 @*/
825 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
826 {
827   PetscSF         fieldSF;
828   PetscInt       *newValues, *remoteOffsets, fieldSize;
829   const PetscInt *originalValues;
830   PetscErrorCode  ierr;
831 
832   PetscFunctionBegin;
833   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
834   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
835 
836   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
837   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
838 
839   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
840   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
841   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
842   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
843   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
844   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
845   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
846   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
847   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
848   PetscFunctionReturn(0);
849 }
850 
851 /*@
852   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
853 
854   Collective on DM
855 
856   Input Parameters:
857 + dm - The DMPlex object
858 . pointSF - The PetscSF describing the communication pattern
859 . originalSection - The PetscSection for existing data layout
860 . datatype - The type of data
861 - originalData - The existing data
862 
863   Output Parameters:
864 + newSection - The PetscSection describing the new data layout
865 - newData - The new data
866 
867   Level: developer
868 
869 .seealso: DMPlexDistribute(), DMPlexDistributeField()
870 @*/
871 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
872 {
873   PetscSF        fieldSF;
874   PetscInt      *remoteOffsets, fieldSize;
875   PetscMPIInt    dataSize;
876   PetscErrorCode ierr;
877 
878   PetscFunctionBegin;
879   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
880   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
881 
882   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
883   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
884   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
885 
886   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
887   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
888   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
889   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
890   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
891   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
892   PetscFunctionReturn(0);
893 }
894 
/* Distributes cone sizes, cone points, and cone orientations from dm to dmParallel
   over the migration SF, translating cone points through the given numberings,
   then symmetrizes and stratifies the result and copies adjacency settings. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Record the maximum cone size appearing in the received layout */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate the sender's cone points to the global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones; /* no mapping given: cones are taken as already global */
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Map received global cone points into the new local numbering in place;
     IS_GTOLM_MASK marks unmapped points with a negative value */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    /* Every cone point must have been found in the local chart */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations are not point indices, so they are shipped as-is over the same SF */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  /* Carry the adjacency configuration over to the parallel mesh */
  {
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
990 
/* Distributes the local coordinate section and vector from dm to dmParallel
   over the migration SF, and copies the periodicity description. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Local (ghosted) coordinates live on a sequential communicator */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size (presumably the spatial dimension) of the coordinate vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); /* dmParallel holds its own reference */
  }
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1026 
1027 /* Here we are assuming that process 0 always has everything */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  /* mesh->depthState caches the state at which the depth label was built;
     a mismatch means the user has modified it, so it must be sent explicitly */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the automatically created depth label on the target so the distributed one replaces it */
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  /* NOTE(review): iteration is in reverse label order — presumably so labels end
     up on dmParallel in the original order; verify against DMAddLabel() semantics */
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 decides (it is assumed to have all labels); on a rank without
       labels, isDepth is simply overwritten by the broadcast */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label; output only if all ranks agree */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1099 
/* Transfers the hybrid-point bounds (hybridPointMax) from dm to dmParallel:
   flag hybrid points on the sender, ship the flags through the migration SF,
   and record the first hybrid point of each stratum on the receiver.
   NOTE(review): the renumbering argument is currently unused here. */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;  /* per-point hybrid flags over the sender/receiver charts */
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    /* For a depth-1 (cell-vertex) mesh the cell bound is stored at index dim, not 1 */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      /* All points from hybridMax to the end of this stratum are hybrid */
      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    /* The first flagged point becomes the new bound; this relies on hybrid
       points landing contiguously at the end of each stratum
       (cf. DMPlexStratifyMigrationSF) */
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1146 
/* Distributes the parent/child constraint tree from dm to dmParallel,
   mirroring the cone distribution: parents are shipped through the migration
   SF in global numbering, then mapped back to the new local numbering. */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) { /* only do the work if the source actually has a tree */
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    if (original) {
      /* Translate the sender's parent points to the global numbering before sending */
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    }
    else {
      globParents = origParents; /* no mapping given: parents are taken as already global */
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    /* Child IDs are not point indices, so they are shipped as-is */
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    /* Map received global parents into the new local numbering in place;
       IS_GTOLM_MASK marks unmapped points with a negative value */
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    {
      /* Every parent must have been found in the local chart */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
#endif
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1225 
1226 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1227 {
1228   PetscMPIInt            rank, size;
1229   MPI_Comm               comm;
1230   PetscErrorCode         ierr;
1231 
1232   PetscFunctionBegin;
1233   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1234   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1235 
1236   /* Create point SF for parallel mesh */
1237   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1238   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1239   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1240   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1241   {
1242     const PetscInt *leaves;
1243     PetscSFNode    *remotePoints, *rowners, *lowners;
1244     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1245     PetscInt        pStart, pEnd;
1246 
1247     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1248     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1249     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1250     for (p=0; p<numRoots; p++) {
1251       rowners[p].rank  = -1;
1252       rowners[p].index = -1;
1253     }
1254     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1255     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1256     for (p = 0; p < numLeaves; ++p) {
1257       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1258         lowners[p].rank  = rank;
1259         lowners[p].index = leaves ? leaves[p] : p;
1260       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1261         lowners[p].rank  = -2;
1262         lowners[p].index = -2;
1263       }
1264     }
1265     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1266       rowners[p].rank  = -3;
1267       rowners[p].index = -3;
1268     }
1269     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1270     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1271     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1272     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1273     for (p = 0; p < numLeaves; ++p) {
1274       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1275       if (lowners[p].rank != rank) ++numGhostPoints;
1276     }
1277     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1278     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1279     for (p = 0, gp = 0; p < numLeaves; ++p) {
1280       if (lowners[p].rank != rank) {
1281         ghostPoints[gp]        = leaves ? leaves[p] : p;
1282         remotePoints[gp].rank  = lowners[p].rank;
1283         remotePoints[gp].index = lowners[p].index;
1284         ++gp;
1285       }
1286     }
1287     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1288     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1289     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1290   }
1291   {
1292     PetscBool useCone, useClosure, useAnchors;
1293 
1294     ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
1295     ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
1296     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1297     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1298   }
1299   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 /*@
1304   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1305 
1306   Input Parameters:
1307 + dm - The DMPlex object
1308 - flg - Balance the partition?
1309 
1310   Level: intermediate
1311 
1312 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1313 @*/
1314 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1315 {
1316   DM_Plex *mesh = (DM_Plex *)dm->data;
1317 
1318   PetscFunctionBegin;
1319   mesh->partitionBalance = flg;
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 /*@
1324   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1325 
  Input Parameter:
. dm - The DMPlex object

  Output Parameter:
. flg - Balance the partition?
1331 
1332   Level: intermediate
1333 
1334 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1335 @*/
1336 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1337 {
1338   DM_Plex *mesh = (DM_Plex *)dm->data;
1339 
1340   PetscFunctionBegin;
1341   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1342   PetscValidIntPointer(flg, 2);
1343   *flg = mesh->partitionBalance;
1344   PetscFunctionReturn(0);
1345 }
1346 
1347 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1357 
1358   Level: developer
1359 
1360 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1361 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                   /* only created in the ownership && balance path; use of it is guarded by shift != NULL */
  const PetscInt     numShifts = 13759;        /* size of the random shift lookup table */
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE; /* compile-time switch for the synchronized voting printout below */
  PetscBool          balance;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);

  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
      ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
      ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
      ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
      ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
      ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
      ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
    }

    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
      }
      /* Either put in a bid or we know we own it: the bid is (possibly shifted) rank, paired with the leaf index */
      leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    /* MAXLOC over (rank, index) pairs: the highest bidder wins each root point */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    if (balance) {
      /* We've voted, now we need to get the rank.  When we're balancing the partition, the "rank" in rootNodes is not
       * the rank but rather (rank + random)%size.  So we do another reduction, voting the same way, but sending the
       * rank instead of the index. */
      PetscSFNode *rootRanks = NULL;
      ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
      for (p = 0; p < nroots; p++) {
        rootRanks[p].rank = -3;
        rootRanks[p].index = -3;
      }
      /* Same shifted-rank bid, but the payload (index slot) now carries the true rank */
      for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
      ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
      ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
      /* Replace the shifted pseudo-rank with the winner's true rank */
      for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
      ierr = PetscFree(rootRanks);CHKERRQ(ierr);
    }
  } else {
    /* No vote: every root is owned by the local rank */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Broadcast the ownership decision back to the leaves */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Points whose owner is a different rank become leaves of the new point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
    ierr = VecDestroy(&shifts);CHKERRQ(ierr);
  }
  if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
  /* pointLocal/pointRemote ownership passes to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1470 
/*@C
  DMPlexMigrate  - Migrates internal DM data over the supplied star forest

  Collective on DM and PetscSF

  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object

  Level: intermediate

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
1487 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1488 {
1489   MPI_Comm               comm;
1490   PetscInt               dim, cdim, nroots;
1491   PetscSF                sfPoint;
1492   ISLocalToGlobalMapping ltogMigration;
1493   ISLocalToGlobalMapping ltogOriginal = NULL;
1494   PetscBool              flg;
1495   PetscErrorCode         ierr;
1496 
1497   PetscFunctionBegin;
1498   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1499   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1500   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1501   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1502   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1503   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1504   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1505 
1506   /* Check for a one-to-all distribution pattern */
1507   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1508   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1509   if (nroots >= 0) {
1510     IS        isOriginal;
1511     PetscInt  n, size, nleaves;
1512     PetscInt  *numbering_orig, *numbering_new;
1513 
1514     /* Get the original point numbering */
1515     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1516     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1517     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1518     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1519     /* Convert to positive global numbers */
1520     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1521     /* Derive the new local-to-global mapping from the old one */
1522     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1523     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1524     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1525     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1526     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1527     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1528     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1529   } else {
1530     /* One-to-all distribution pattern: We can derive LToG from SF */
1531     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1532   }
1533   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1534   if (flg) {
1535     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1536     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1537   }
1538   /* Migrate DM data to target DM */
1539   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1540   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1541   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1542   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1543   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1544   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1545   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1546   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1547   PetscFunctionReturn(0);
1548 }
1549 
1550 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
1551 
1552 /*@C
1553   DMPlexDistribute - Distributes the mesh and any associated sections.
1554 
1555   Collective on DM
1556 
  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object
1564 
1565   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1566 
1567   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1568   representation on the mesh.
1569 
1570   Level: intermediate
1571 
1572 .keywords: mesh, elements
1573 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
1574 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* A serial run has nothing to distribute; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    IS         is;
    PetscHSetI ht;
    PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks, *iranks;
    const PetscInt *points;

    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata: collect the set of target ranks that receive any points */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure(): one stratum per target rank, holding the closure of its cells */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexPartitionLabelClosure_Private(dm, lblPartition, proc, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  /* Invert the partition label (who gets what) into a migration label (who sends what), then an SF */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  /* Stratify the SF in case we are migrating an already parallel plex */
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap (PETSC_TRUE: vote on point ownership) */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves;
    PetscSFNode       *newRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;
    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern:
       compose sfMigration with sfOverlap by broadcasting the old remotes over the overlap SF */
    ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: hand the migration SF to the caller if requested, otherwise destroy it */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1718 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on DM

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* A serial mesh has no overlap to build; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* The ownership sections/ranks are only needed to compute the overlap label */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF (PETSC_FALSE: ownership is kept, no vote) */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1803 
1804 /*@C
1805   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1806   root process of the original's communicator.
1807 
1808   Collective on DM
1809 
  Input Parameter:
. dm - the original DMPlex object
1812 
1813   Output Parameters:
1814 + sf - the PetscSF used for point distribution (optional)
1815 - gatherMesh - the gathered DM object, or NULL
1816 
1817   Level: intermediate
1818 
1819 .keywords: mesh
1820 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1821 @*/
1822 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1823 {
1824   MPI_Comm       comm;
1825   PetscMPIInt    size;
1826   PetscPartitioner oldPart, gatherPart;
1827   PetscErrorCode ierr;
1828 
1829   PetscFunctionBegin;
1830   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1831   PetscValidPointer(gatherMesh,2);
1832   *gatherMesh = NULL;
1833   if (sf) *sf = NULL;
1834   comm = PetscObjectComm((PetscObject)dm);
1835   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1836   if (size == 1) PetscFunctionReturn(0);
1837   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1838   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1839   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1840   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1841   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1842   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1843 
1844   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1845   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1846   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1847   PetscFunctionReturn(0);
1848 }
1849 
1850 /*@C
1851   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1852 
1853   Collective on DM
1854 
  Input Parameter:
. dm - the original DMPlex object
1857 
1858   Output Parameters:
1859 + sf - the PetscSF used for point distribution (optional)
1860 - redundantMesh - the redundant DM object, or NULL
1861 
1862   Level: intermediate
1863 
1864 .keywords: mesh
1865 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1866 @*/
1867 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1868 {
1869   MPI_Comm       comm;
1870   PetscMPIInt    size, rank;
1871   PetscInt       pStart, pEnd, p;
1872   PetscInt       numPoints = -1;
1873   PetscSF        migrationSF, sfPoint, gatherSF;
1874   DM             gatherDM, dmCoord;
1875   PetscSFNode    *points;
1876   PetscErrorCode ierr;
1877 
1878   PetscFunctionBegin;
1879   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1880   PetscValidPointer(redundantMesh,2);
1881   *redundantMesh = NULL;
1882   comm = PetscObjectComm((PetscObject)dm);
1883   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1884   if (size == 1) {
1885     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1886     *redundantMesh = dm;
1887     if (sf) *sf = NULL;
1888     PetscFunctionReturn(0);
1889   }
1890   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1891   if (!gatherDM) PetscFunctionReturn(0);
1892   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1893   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1894   numPoints = pEnd - pStart;
1895   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1896   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1897   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1898   for (p = 0; p < numPoints; p++) {
1899     points[p].index = p;
1900     points[p].rank  = 0;
1901   }
1902   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1903   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1904   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1905   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1906   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1907   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1908   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1909   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1910   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1911   if (sf) {
1912     PetscSF tsf;
1913 
1914     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1915     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1916     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1917   }
1918   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1919   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1920   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1921   PetscFunctionReturn(0);
1922 }
1923