xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 57715deb645f54fa2d0f094b2765b18b9347b2aa)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 - user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* DMPlexGetAdjacency_Internal - Compute the points adjacent to p according to the flags.

   The user callback (if set) overrides everything; otherwise useTransitiveClosure
   selects the transitive kernel, and useCone chooses between cone- and
   support-based adjacency.  If useAnchors is set and the DM has anchors,
   constrained points in the result are replaced in place by their anchor points.

   On input, *adj may be NULL (an array is allocated here; the caller must
   eventually PetscFree it) and *adjSize may be negative (meaning "use the
   cached capacity").  On output *adjSize is the number of adjacent points. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0; /* NOTE(review): static cache of the last computed capacity; it is only refreshed when *adj is NULL, so a first call with a preallocated array and *adjSize < 0 would see 0 — confirm callers never do that */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* aSec stays NULL when the mesh has no anchors, disabling the substitution below */
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Worst-case capacity estimate: geometric series over the maximum cone and
       support sizes through `depth` levels, scaled by the anchor fan-out, and
       capped by the total number of points in the chart */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    depth = PetscMax(depth, -depth);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (mesh->useradjacency) {
    /* A user callback overrides all built-in adjacency determination */
    ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point by its anchors: constrained points are
       removed by shifting the tail left, then the anchors are appended
       (skipping duplicates).  i is only advanced past unconstrained points,
       so newly appended anchors are themselves re-examined. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Compact out the constrained point orig[i] */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* NOTE(review): the append-then-check idiom here writes orig[numAdj]
             before the bound test, like the kernels above — confirm capacity
             is always sufficient given the maxAnchors scaling */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
268 
269 /*@
270   DMPlexGetAdjacency - Return all points adjacent to the given point
271 
272   Input Parameters:
273 + dm - The DM object
274 . p  - The point
275 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
276 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
277 
278   Output Parameters:
279 + adjSize - The number of adjacent points
280 - adj - The adjacent points
281 
282   Level: advanced
283 
284   Notes:
285     The user must PetscFree the adj array if it was not passed in.
286 
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291   PetscBool      useCone, useClosure, useAnchors;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscValidPointer(adjSize,3);
297   PetscValidPointer(adj,4);
298   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 /*@
305   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
306 
307   Collective on dm
308 
309   Input Parameters:
310 + dm      - The DM
311 - sfPoint - The PetscSF which encodes point connectivity
312 
313   Output Parameters:
314 + processRanks - A list of process neighbors, or NULL
315 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
316 
317   Level: developer
318 
319 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
320 @*/
321 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
322 {
323   const PetscSFNode *remotePoints;
324   PetscInt          *localPointsNew;
325   PetscSFNode       *remotePointsNew;
326   const PetscInt    *nranks;
327   PetscInt          *ranksNew;
328   PetscBT            neighbors;
329   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
330   PetscMPIInt        size, proc, rank;
331   PetscErrorCode     ierr;
332 
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
335   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
336   if (processRanks) {PetscValidPointer(processRanks, 3);}
337   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
338   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
339   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
340   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
341   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
342   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
343   /* Compute root-to-leaf process connectivity */
344   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
345   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
346   for (p = pStart; p < pEnd; ++p) {
347     PetscInt ndof, noff, n;
348 
349     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
350     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
351     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
352   }
353   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
354   /* Compute leaf-to-neighbor process connectivity */
355   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
356   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
357   for (p = pStart; p < pEnd; ++p) {
358     PetscInt ndof, noff, n;
359 
360     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
361     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
362     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
363   }
364   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
365   /* Compute leaf-to-root process connectivity */
366   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
367   /* Calculate edges */
368   PetscBTClear(neighbors, rank);
369   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
370   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
372   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
373   for(proc = 0, n = 0; proc < size; ++proc) {
374     if (PetscBTLookup(neighbors, proc)) {
375       ranksNew[n]              = proc;
376       localPointsNew[n]        = proc;
377       remotePointsNew[n].index = rank;
378       remotePointsNew[n].rank  = proc;
379       ++n;
380     }
381   }
382   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
383   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
384   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
385   if (sfProcess) {
386     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
387     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
388     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
389     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
390   }
391   PetscFunctionReturn(0);
392 }
393 
394 /*@
395   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
396 
397   Collective on dm
398 
399   Input Parameter:
400 . dm - The DM
401 
402   Output Parameters:
403 + rootSection - The number of leaves for a given root point
404 . rootrank    - The rank of each edge into the root point
405 . leafSection - The number of processes sharing a given leaf point
406 - leafrank    - The rank of each process sharing a leaf point
407 
408   Level: developer
409 
410 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
411 @*/
412 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
413 {
414   MPI_Comm        comm;
415   PetscSF         sfPoint;
416   const PetscInt *rootdegree;
417   PetscInt       *myrank, *remoterank;
418   PetscInt        pStart, pEnd, p, nedges;
419   PetscMPIInt     rank;
420   PetscErrorCode  ierr;
421 
422   PetscFunctionBegin;
423   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
424   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
425   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
426   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
427   /* Compute number of leaves for each root */
428   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
429   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
430   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
431   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
432   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
433   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
434   /* Gather rank of each leaf to root */
435   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
436   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
437   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
438   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
439   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
441   ierr = PetscFree(myrank);CHKERRQ(ierr);
442   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
443   /* Distribute remote ranks to leaves */
444   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
445   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
446   PetscFunctionReturn(0);
447 }
448 
449 /*@C
450   DMPlexCreateOverlapLabel - Compute owner information for shared points. This basically gets two-sided for an SF.
451 
452   Collective on dm
453 
454   Input Parameters:
455 + dm          - The DM
456 . levels      - Number of overlap levels
457 . rootSection - The number of leaves for a given root point
458 . rootrank    - The rank of each edge into the root point
459 . leafSection - The number of processes sharing a given leaf point
460 - leafrank    - The rank of each process sharing a leaf point
461 
462   Output Parameter:
463 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
464 
465   Level: developer
466 
467 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
468 @*/
469 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
470 {
471   MPI_Comm           comm;
472   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
473   PetscSF            sfPoint;
474   const PetscSFNode *remote;
475   const PetscInt    *local;
476   const PetscInt    *nrank, *rrank;
477   PetscInt          *adj = NULL;
478   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
479   PetscMPIInt        rank, size;
480   PetscBool          flg;
481   PetscErrorCode     ierr;
482 
483   PetscFunctionBegin;
484   *ovLabel = NULL;
485   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
486   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
487   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
488   if (size == 1) PetscFunctionReturn(0);
489   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
490   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
491   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
492   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
493   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
494   /* Handle leaves: shared with the root point */
495   for (l = 0; l < nleaves; ++l) {
496     PetscInt adjSize = PETSC_DETERMINE, a;
497 
498     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
499     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
500   }
501   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
502   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
503   /* Handle roots */
504   for (p = pStart; p < pEnd; ++p) {
505     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
506 
507     if ((p >= sStart) && (p < sEnd)) {
508       /* Some leaves share a root with other leaves on different processes */
509       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
510       if (neighbors) {
511         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
512         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
513         for (n = 0; n < neighbors; ++n) {
514           const PetscInt remoteRank = nrank[noff+n];
515 
516           if (remoteRank == rank) continue;
517           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
518         }
519       }
520     }
521     /* Roots are shared with leaves */
522     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
523     if (!neighbors) continue;
524     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
525     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
526     for (n = 0; n < neighbors; ++n) {
527       const PetscInt remoteRank = rrank[noff+n];
528 
529       if (remoteRank == rank) continue;
530       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
531     }
532   }
533   ierr = PetscFree(adj);CHKERRQ(ierr);
534   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
535   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
536   /* Add additional overlap levels */
537   for (l = 1; l < levels; l++) {
538     /* Propagate point donations over SF to capture remote connections */
539     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
540     /* Add next level of point donations to the label */
541     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
542   }
543   /* We require the closure in the overlap */
544   ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     PetscViewer viewer;
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
549     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
550   }
551   /* Invert sender to receiver label */
552   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
553   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
554   /* Add owned points, except for shared local points */
555   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
556   for (l = 0; l < nleaves; ++l) {
557     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
558     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
559   }
560   /* Clean up */
561   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
562   PetscFunctionReturn(0);
563 }
564 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets.
     NOTE(review): strata are indexed 0..dim, so this assumes an interpolated
     mesh where depth == dim -- confirm for callers with uninterpolated meshes */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* Pull the depth of each incoming overlap point from its owner */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): remoteDepths entries still -1 (leaf not reached by the bcast)
     would index depthRecv out of bounds here -- presumably every leaf has an
     owner; confirm */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Shifts place received points after existing local points of each stratum,
     in the order cells (depth dim), vertices (0), then intermediate strata.
     NOTE(review): the three loops below only cover dim <= 3 layouts -- confirm */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the running insertion cursor for stratum d in the new numbering */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Overwrite the self-mapping: shared points keep their remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended per stratum via depthIdx */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal and iremote ownership transfers to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
675 
676 /*@
677   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
678 
679   Input Parameters:
680 + dm          - The DM
681 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
682 
683   Output Parameter:
684 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
685 
686   Note:
687   This lexicographically sorts by (depth, cellType)
688 
689   Level: developer
690 
691 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
692 @*/
693 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
694 {
695   MPI_Comm           comm;
696   PetscMPIInt        rank, size;
697   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
698   PetscSFNode       *pointDepths, *remoteDepths;
699   PetscInt          *ilocal;
700   PetscInt          *depthRecv, *depthShift, *depthIdx;
701   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
702   const PetscSFNode *iremote;
703   PetscErrorCode     ierr;
704 
705   PetscFunctionBegin;
706   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
707   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
708   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
709   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
710   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
711   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
712   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
713   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
714   ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
715 
716   /* Before building the migration SF we need to know the new stratum offsets */
717   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
718   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
719   for (d = 0; d < depth+1; ++d) {
720     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
721     for (p = pStart; p < pEnd; ++p) {
722       DMPolytopeType ct;
723 
724       ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
725       pointDepths[p].index = d;
726       pointDepths[p].rank  = ct;
727     }
728   }
729   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
730   ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
731   ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths);CHKERRQ(ierr);
732   /* Count received points in each stratum and compute the internal strata shift */
733   ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
734   for (p = 0; p < nleaves; ++p) {
735     if (remoteDepths[p].rank < 0) {
736       ++depthRecv[remoteDepths[p].index];
737     } else {
738       ++ctRecv[remoteDepths[p].rank];
739     }
740   }
741   {
742     PetscInt depths[4], dims[4], shift = 0, i, c;
743 
744     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1) */
745     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
746     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
747     for (i = 0; i <= depth; ++i) {
748       const PetscInt dep = depths[i];
749       const PetscInt dim = dims[i];
750 
751       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
752         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && c == DM_POLYTOPE_FV_GHOST)) continue;
753         ctShift[c] = shift;
754         shift     += ctRecv[c];
755       }
756       depthShift[dep] = shift;
757       shift          += depthRecv[dep];
758     }
759     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
760       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
761 
762       if ((ctDim < 0 || ctDim > dim) && c != DM_POLYTOPE_FV_GHOST) {
763         ctShift[c] = shift;
764         shift     += ctRecv[c];
765       }
766     }
767   }
768   /* Derive a new local permutation based on stratified indices */
769   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
770   for (p = 0; p < nleaves; ++p) {
771     const PetscInt       dep = remoteDepths[p].index;
772     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
773 
774     if ((PetscInt) ct < 0) {
775       ilocal[p] = depthShift[dep] + depthIdx[dep];
776       ++depthIdx[dep];
777     } else {
778       ilocal[p] = ctShift[ct] + ctIdx[ct];
779       ++ctIdx[ct];
780     }
781   }
782   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
783   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
784   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
785   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
786   ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
787   ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
788   PetscFunctionReturn(0);
789 }
790 
791 /*@
792   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
793 
794   Collective on dm
795 
796   Input Parameters:
797 + dm - The DMPlex object
798 . pointSF - The PetscSF describing the communication pattern
799 . originalSection - The PetscSection for existing data layout
800 - originalVec - The existing data in a local vector
801 
802   Output Parameters:
+ newSection - The PetscSection describing the new data layout
804 - newVec - The new data in a local vector
805 
806   Level: developer
807 
808 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
809 @*/
810 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
811 {
812   PetscSF        fieldSF;
813   PetscInt      *remoteOffsets, fieldSize;
814   PetscScalar   *originalValues, *newValues;
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
819   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
820 
821   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
822   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
823   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
824 
825   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
826   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
827   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
828   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
829   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
830   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
831   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
832   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
833   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
834   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 /*@
839   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
840 
841   Collective on dm
842 
843   Input Parameters:
844 + dm - The DMPlex object
845 . pointSF - The PetscSF describing the communication pattern
846 . originalSection - The PetscSection for existing data layout
847 - originalIS - The existing data
848 
849   Output Parameters:
+ newSection - The PetscSection describing the new data layout
851 - newIS - The new data
852 
853   Level: developer
854 
855 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
856 @*/
857 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
858 {
859   PetscSF         fieldSF;
860   PetscInt       *newValues, *remoteOffsets, fieldSize;
861   const PetscInt *originalValues;
862   PetscErrorCode  ierr;
863 
864   PetscFunctionBegin;
865   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
866   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
867 
868   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
869   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
870 
871   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
872   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
873   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
874   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
875   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
876   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
877   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
878   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
879   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
880   PetscFunctionReturn(0);
881 }
882 
883 /*@
884   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
885 
886   Collective on dm
887 
888   Input Parameters:
889 + dm - The DMPlex object
890 . pointSF - The PetscSF describing the communication pattern
891 . originalSection - The PetscSection for existing data layout
892 . datatype - The type of data
893 - originalData - The existing data
894 
895   Output Parameters:
896 + newSection - The PetscSection describing the new data layout
897 - newData - The new data
898 
899   Level: developer
900 
901 .seealso: DMPlexDistribute(), DMPlexDistributeField()
902 @*/
903 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
904 {
905   PetscSF        fieldSF;
906   PetscInt      *remoteOffsets, fieldSize;
907   PetscMPIInt    dataSize;
908   PetscErrorCode ierr;
909 
910   PetscFunctionBegin;
911   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
912   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
913 
914   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
915   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
916   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
917 
918   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
919   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
920   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
921   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
922   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
923   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
/* Migrate cone sizes, cone points, and cone orientations from dm to dmParallel over
   migrationSF, translating received cone points into the new local numbering, then
   symmetrize/stratify the parallel mesh and copy over the adjacency settings.
   - original: optional local-to-global map for the source cones; if NULL the source
     cones are assumed to already carry global block numbering
   - renumbering: global-to-local map defining the new local point numbering */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size on the new mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Convert source cone points to global block numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Map received global cone points into the new local numbering; unmapped points become -1 (IS_GTOLM_MASK) */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    /* A negative entry means a cone point was not covered by the migration SF */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations need no renumbering, so send them over the same SF directly */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Copy adjacency configuration to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1019 
1020 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1021 {
1022   MPI_Comm         comm;
1023   PetscSection     originalCoordSection, newCoordSection;
1024   Vec              originalCoordinates, newCoordinates;
1025   PetscInt         bs;
1026   PetscBool        isper;
1027   const char      *name;
1028   const PetscReal *maxCell, *L;
1029   const DMBoundaryType *bd;
1030   PetscErrorCode   ierr;
1031 
1032   PetscFunctionBegin;
1033   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1034   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1035 
1036   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1037   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1038   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1039   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1040   if (originalCoordinates) {
1041     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1042     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1043     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1044 
1045     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1046     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1047     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1048     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1049     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1050   }
1051   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1052   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
/* Migrate all labels of dm to dmParallel through migrationSF.
   Collective: the label count is broadcast from rank 0, and each rank either has
   the same number of labels or participates with none (hasLabels). The "depth"
   label is only migrated if some rank modified it since the last stratification;
   otherwise it is skipped because stratification recreates it. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  /* mesh->depthState caches the state seen at stratification; a mismatch implies a user edit */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the automatically created depth label so the migrated one can replace it */
    ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
    ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 decides whether label l is "depth" — assumes label order matches across ranks; TODO confirm */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1128 
/* Migrate the reference tree and the parent/child point relations of dm to
   dmParallel over migrationSF. Parent points are sent in global block numbering
   (via 'original' if given) and mapped into the new local numbering ('renumbering'). */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    if (original) {
      /* Convert parent points to global block numbering before sending */
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    }
    else {
      globParents = origParents;
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    /* Map received global parents into the new local numbering; unmapped points become -1 (IS_GTOLM_MASK) */
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
    if (PetscDefined(USE_DEBUG)) {
      /* A negative entry means a parent point was not covered by the migration SF */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1205 
/* Build the point SF (ghost-point overlap) of dmParallel from the migration SF:
   every receiving rank bids for each migrated point, the highest bidding rank
   wins (MPI_MAXLOC over (rank, index) pairs), and all other ranks reference the
   winner's copy as a ghost. Adjacency settings are copied over at the end. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Start with all roots unclaimed (-1), and push that state to the leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve each point's owner by MAXLOC over the bids, then tell all leaves the winner */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    /* Leaves owned by another rank become ghost points of the new point SF */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Copy adjacency configuration to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1282 
1283 /*@
1284   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1285 
1286   Input Parameters:
1287 + dm - The DMPlex object
1288 - flg - Balance the partition?
1289 
1290   Level: intermediate
1291 
1292 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1293 @*/
1294 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1295 {
1296   DM_Plex *mesh = (DM_Plex *)dm->data;
1297 
1298   PetscFunctionBegin;
1299   mesh->partitionBalance = flg;
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 /*@
1304   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1305 
1306   Input Parameter:
1307 . dm - The DMPlex object
1308 
1309   Output Parameter:
1310 . flg - Balance the partition?
1311 
1312   Level: intermediate
1313 
1314 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1315 @*/
1316 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1317 {
1318   DM_Plex *mesh = (DM_Plex *)dm->data;
1319 
1320   PetscFunctionBegin;
1321   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1322   PetscValidBoolPointer(flg, 2);
1323   *flg = mesh->partitionBalance;
1324   PetscFunctionReturn(0);
1325 }
1326 
/* Ownership ballot entry: 'vote' is the primary comparison key, 'rank' breaks ties,
   and 'index' is the payload carried along with the winning entry. */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1330 
1331 /* MaxLoc, but carry a third piece of information around */
1332 static void MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1333 {
1334   Petsc3Int *a = (Petsc3Int *)inout_;
1335   Petsc3Int *b = (Petsc3Int *)in_;
1336   PetscInt i, len = *len_;
1337   for (i = 0; i < len; i++) {
1338     if (a[i].vote < b[i].vote) {
1339       a[i].vote = b[i].vote;
1340       a[i].rank = b[i].rank;
1341       a[i].index = b[i].index;
1342     } else if (a[i].vote <= b[i].vote) {
1343       if (a[i].rank >= b[i].rank) {
1344         a[i].rank = b[i].rank;
1345         a[i].index = b[i].index;
1346       }
1347     }
1348   }
1349 }
1350 
1351 /*@C
1352   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1353 
1354   Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1361 
1362   Notes:
1363   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1364 
1365   Level: developer
1366 
1367 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1368 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                   /* random vote shifts; only created when ownership && balance */
  const PetscInt     numShifts = 13759;        /* size of the shift table; points index it modulo this value */
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE; /* compile-time switch for synchronized vote tracing */
  PetscBool          balance;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);

  /* rootNodes[r] will hold the (owner rank, owner-local index) record for each migration root;
     leafNodes[p] receives that record on the leaf side after the broadcast below */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
      ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
      ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
      ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
      ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
      ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
      ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
    /* Point ownership vote: the process with the highest (optionally shifted) vote owns each shared
       point; MaxLocCarry breaks vote ties in favor of the lower rank */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce the per-leaf bids onto the roots with the custom MaxLoc-with-carry operation */
    ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRQ(ierr);
    ierr = MPI_Type_commit(&datatype);CHKERRQ(ierr);
    ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = MPI_Op_free(&op);CHKERRQ(ierr);
    ierr = MPI_Type_free(&datatype);CHKERRQ(ierr);
    /* The winning (rank, leaf index) pair becomes the owner record for each root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    ierr = PetscFree(leafVote);CHKERRQ(ierr);
    ierr = PetscFree(rootVote);CHKERRQ(ierr);
  } else {
    /* No vote: each root stays owned by this rank; record the new local id of locally-owned points */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Broadcast each root's owner record back to all leaves that reference it */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves whose owner is another rank become the leaves of the resulting point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
    ierr = VecDestroy(&shifts);CHKERRQ(ierr);
  }
  if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1482 
1483 /*@C
1484   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1485 
1486   Collective on dm
1487 
1488   Input Parameters:
1489 + dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern
1491 
1492   Output Parameter:
. targetDM - The target DMPlex object
1494 
1495   Level: intermediate
1496 
1497 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1498 @*/
1499 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1500 {
1501   MPI_Comm               comm;
1502   PetscInt               dim, cdim, nroots;
1503   PetscSF                sfPoint;
1504   ISLocalToGlobalMapping ltogMigration;
1505   ISLocalToGlobalMapping ltogOriginal = NULL;
1506   PetscBool              flg;
1507   PetscErrorCode         ierr;
1508 
1509   PetscFunctionBegin;
1510   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1511   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1512   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1513   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1514   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1515   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1516   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1517 
1518   /* Check for a one-to-all distribution pattern */
1519   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1520   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1521   if (nroots >= 0) {
1522     IS        isOriginal;
1523     PetscInt  n, size, nleaves;
1524     PetscInt  *numbering_orig, *numbering_new;
1525 
1526     /* Get the original point numbering */
1527     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1528     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1529     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1530     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1531     /* Convert to positive global numbers */
1532     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1533     /* Derive the new local-to-global mapping from the old one */
1534     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1535     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1536     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1537     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
1538     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1539     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1540     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1541   } else {
1542     /* One-to-all distribution pattern: We can derive LToG from SF */
1543     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1544   }
1545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1546   if (flg) {
1547     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1548     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1549   }
1550   /* Migrate DM data to target DM */
1551   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1552   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1553   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1554   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1555   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1556   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1557   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1558   PetscFunctionReturn(0);
1559 }
1560 
1561 /*@C
1562   DMPlexDistribute - Distributes the mesh and any associated sections.
1563 
1564   Collective on dm
1565 
1566   Input Parameters:
1567 + dm  - The original DMPlex object
1568 - overlap - The overlap of partitions, 0 is the default
1569 
1570   Output Parameters:
1571 + sf - The PetscSF used for point distribution, or NULL if not needed
1572 - dmParallel - The distributed DMPlex object
1573 
1574   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1575 
1576   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1577   representation on the mesh.
1578 
1579   Level: intermediate
1580 
1581 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
1582 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Nothing to distribute on a single rank; outputs stay NULL by convention */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition: cellPartSection maps target rank -> number of cells, cellPart lists the cells */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata: collect the set of target ranks that receive at least one cell */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure(): label the transitive closure of each rank's cells */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);

  /* Invert the partition label into a migration label, turn it into an SF, and stratify it
     so migrated points keep DAG-stratum ordering */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap (ownership decided by vote, see DMPlexCreatePointSF) */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    /* oldRemote points at permRemote here, so this frees the permuted copy only */
    if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural: mapping back to the original (natural) ordering, if requested */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: hand the migration SF to the caller, or release it */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1741 
1742 /*@C
1743   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1744 
1745   Collective on dm
1746 
1747   Input Parameters:
1748 + dm  - The non-overlapping distributed DMPlex object
1749 - overlap - The overlap of partitions (the same on all ranks)
1750 
1751   Output Parameters:
1752 + sf - The PetscSF used for point distribution
1753 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1754 
1755   Notes:
1756   If the mesh was not distributed, the return value is NULL.
1757 
1758   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1759   representation on the mesh.
1760 
1761   Level: advanced
1762 
1763 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
1764 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* A serial mesh has no partition boundary, hence no overlap to build; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Store the overlap in the new DM; overlap depths accumulate over repeated calls */
  ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
  /* Build the new point SF (ownership == PETSC_FALSE: no vote, owners come from the migration roots) */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  /* Hand the overlap migration SF to the caller, or release it */
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1829 
1830 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1831 {
1832   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1833 
1834   PetscFunctionBegin;
1835   *overlap = mesh->overlap;
1836   PetscFunctionReturn(0);
1837 }
1838 
1839 /*@
1840   DMPlexGetOverlap - Get the DMPlex partition overlap.
1841 
1842   Not collective
1843 
1844   Input Parameter:
1845 . dm - The DM
1846 
1847   Output Parameter:
1848 . overlap - The overlap of this DM
1849 
1850   Level: intermediate
1851 
1852 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
1853 @*/
PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
{
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Dispatch through the composed "DMPlexGetOverlap_C" method (see DMPlexGetOverlap_Plex above) */
  ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1863 
1864 
1865 /*@C
1866   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1867   root process of the original's communicator.
1868 
1869   Collective on dm
1870 
1871   Input Parameter:
1872 . dm - the original DMPlex object
1873 
1874   Output Parameters:
1875 + sf - the PetscSF used for point distribution (optional)
1876 - gatherMesh - the gathered DM object, or NULL
1877 
1878   Level: intermediate
1879 
1880 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1881 @*/
1882 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1883 {
1884   MPI_Comm       comm;
1885   PetscMPIInt    size;
1886   PetscPartitioner oldPart, gatherPart;
1887   PetscErrorCode ierr;
1888 
1889   PetscFunctionBegin;
1890   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1891   PetscValidPointer(gatherMesh,2);
1892   *gatherMesh = NULL;
1893   if (sf) *sf = NULL;
1894   comm = PetscObjectComm((PetscObject)dm);
1895   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1896   if (size == 1) PetscFunctionReturn(0);
1897   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1898   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1899   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1900   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1901   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1902   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1903 
1904   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1905   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1906   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1907   PetscFunctionReturn(0);
1908 }
1909 
1910 /*@C
1911   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1912 
1913   Collective on dm
1914 
1915   Input Parameter:
1916 . dm - the original DMPlex object
1917 
1918   Output Parameters:
1919 + sf - the PetscSF used for point distribution (optional)
1920 - redundantMesh - the redundant DM object, or NULL
1921 
1922   Level: intermediate
1923 
1924 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1925 @*/
1926 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1927 {
1928   MPI_Comm       comm;
1929   PetscMPIInt    size, rank;
1930   PetscInt       pStart, pEnd, p;
1931   PetscInt       numPoints = -1;
1932   PetscSF        migrationSF, sfPoint, gatherSF;
1933   DM             gatherDM, dmCoord;
1934   PetscSFNode    *points;
1935   PetscErrorCode ierr;
1936 
1937   PetscFunctionBegin;
1938   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1939   PetscValidPointer(redundantMesh,2);
1940   *redundantMesh = NULL;
1941   comm = PetscObjectComm((PetscObject)dm);
1942   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1943   if (size == 1) {
1944     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1945     *redundantMesh = dm;
1946     if (sf) *sf = NULL;
1947     PetscFunctionReturn(0);
1948   }
1949   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1950   if (!gatherDM) PetscFunctionReturn(0);
1951   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1952   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1953   numPoints = pEnd - pStart;
1954   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1955   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1956   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1957   for (p = 0; p < numPoints; p++) {
1958     points[p].index = p;
1959     points[p].rank  = 0;
1960   }
1961   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1962   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1963   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1964   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1965   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1966   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1967   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1968   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1969   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1970   if (sf) {
1971     PetscSF tsf;
1972 
1973     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1974     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1975     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1976   }
1977   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1978   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1979   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1980   PetscFunctionReturn(0);
1981 }
1982 
1983 /*@
1984   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
1985 
1986   Collective
1987 
1988   Input Parameter:
1989 . dm      - The DM object
1990 
1991   Output Parameter:
1992 . distributed - Flag whether the DM is distributed
1993 
1994   Level: intermediate
1995 
1996   Notes:
1997   This currently finds out whether at least two ranks have any DAG points.
1998   This involves MPI_Allreduce() with one integer.
1999   The result is currently not stashed so every call to this routine involves this global communication.
2000 
2001 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2002 @*/
2003 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2004 {
2005   PetscInt          pStart, pEnd, count;
2006   MPI_Comm          comm;
2007   PetscErrorCode    ierr;
2008 
2009   PetscFunctionBegin;
2010   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2011   PetscValidPointer(distributed,2);
2012   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2013   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2014   count = !!(pEnd - pStart);
2015   ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
2016   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2017   PetscFunctionReturn(0);
2018 }
2019