xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 4e278199b78715991f5c71ebbd945c1489263e6c)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
/*@C
  DMPlexGetAdjacencyUser - get the user-defined adjacency callback

  Input Parameter:
. dm      - The DM object

  Output Parameters:
+ user    - The user callback previously set with DMPlexSetAdjacencyUser(), or NULL if none; pass NULL if not needed
- ctx     - context for callback evaluation; pass NULL if not needed

  Level: advanced

.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Both outputs are optional: fill only what the caller asked for */
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* DMPlexGetAdjacency_Internal - Dispatch adjacency computation for point p, allocating
   the output array on first use and optionally substituting anchor points for
   constrained points.

   If *adj is NULL, an array is allocated using a worst-case size estimate and its size
   is cached in the static 'asiz' (NOTE(review): static cache is shared across DMs and
   not thread-safe; a caller-supplied *adj on a later call reuses the last cached asiz
   when *adjSize < 0 -- confirm callers always pass a valid *adjSize in that case). */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;          /* cached allocation size from the most recent allocation */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;   /* chart of the anchor section, when present */
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    /* Allocate the adjacency array using a worst-case geometric-series bound on the
       number of points reachable through cone/support chains of the mesh depth */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    depth = PetscMax(depth, -depth);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart); /* never more than the whole chart */
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* A user callback overrides everything; otherwise pick the variant from the flags */
  if (mesh->useradjacency) {
    ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points:
       remove the point by shifting the tail left, then append its anchors
       (deduplicated) at the end. 'i' only advances past unconstrained points. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point from the list */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append its anchors, skipping any already present (the comma expression
           appends and yields 0 to terminate the scan when q reaches numAdj) */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
268 
269 /*@
270   DMPlexGetAdjacency - Return all points adjacent to the given point
271 
272   Input Parameters:
273 + dm - The DM object
274 . p  - The point
275 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
276 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
277 
278   Output Parameters:
279 + adjSize - The number of adjacent points
280 - adj - The adjacent points
281 
282   Level: advanced
283 
284   Notes:
285     The user must PetscFree the adj array if it was not passed in.
286 
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291   PetscBool      useCone, useClosure, useAnchors;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscValidPointer(adjSize,3);
297   PetscValidPointer(adj,4);
298   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 /*@
305   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
306 
307   Collective on dm
308 
309   Input Parameters:
310 + dm      - The DM
311 - sfPoint - The PetscSF which encodes point connectivity
312 
313   Output Parameters:
314 + processRanks - A list of process neighbors, or NULL
315 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
316 
317   Level: developer
318 
319 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
320 @*/
321 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
322 {
323   const PetscSFNode *remotePoints;
324   PetscInt          *localPointsNew;
325   PetscSFNode       *remotePointsNew;
326   const PetscInt    *nranks;
327   PetscInt          *ranksNew;
328   PetscBT            neighbors;
329   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
330   PetscMPIInt        size, proc, rank;
331   PetscErrorCode     ierr;
332 
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
335   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
336   if (processRanks) {PetscValidPointer(processRanks, 7);}
337   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
338   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
339   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
340   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
341   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
342   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
343   /* Compute root-to-leaf process connectivity */
344   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
345   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
346   for (p = pStart; p < pEnd; ++p) {
347     PetscInt ndof, noff, n;
348 
349     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
350     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
351     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
352   }
353   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
354   /* Compute leaf-to-neighbor process connectivity */
355   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
356   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
357   for (p = pStart; p < pEnd; ++p) {
358     PetscInt ndof, noff, n;
359 
360     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
361     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
362     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
363   }
364   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
365   /* Compute leaf-to-root process connectivity */
366   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
367   /* Calculate edges */
368   PetscBTClear(neighbors, rank);
369   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
370   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
372   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
373   for (proc = 0, n = 0; proc < size; ++proc) {
374     if (PetscBTLookup(neighbors, proc)) {
375       ranksNew[n]              = proc;
376       localPointsNew[n]        = proc;
377       remotePointsNew[n].index = rank;
378       remotePointsNew[n].rank  = proc;
379       ++n;
380     }
381   }
382   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
383   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
384   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
385   if (sfProcess) {
386     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
387     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
388     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
389     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
390   }
391   PetscFunctionReturn(0);
392 }
393 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: rootSection gets one dof per attached leaf */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends this process's rank, and each
     root receives one value per attached leaf (nedges = sum of root degrees) */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves, producing leafSection/leafrank */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
448 
449 /*@C
450   DMPlexCreateOverlapLabel - Compute owner information for shared points. This basically gets two-sided for an SF.
451 
452   Collective on dm
453 
454   Input Parameters:
455 + dm          - The DM
456 . levels      - Number of overlap levels
457 . rootSection - The number of leaves for a given root point
458 . rootrank    - The rank of each edge into the root point
459 . leafSection - The number of processes sharing a given leaf point
460 - leafrank    - The rank of each process sharing a leaf point
461 
462   Output Parameter:
463 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
464 
465   Level: developer
466 
467 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
468 @*/
469 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
470 {
471   MPI_Comm           comm;
472   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
473   PetscSF            sfPoint;
474   const PetscSFNode *remote;
475   const PetscInt    *local;
476   const PetscInt    *nrank, *rrank;
477   PetscInt          *adj = NULL;
478   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
479   PetscMPIInt        rank, size;
480   PetscBool          flg;
481   PetscErrorCode     ierr;
482 
483   PetscFunctionBegin;
484   *ovLabel = NULL;
485   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
486   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
487   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
488   if (size == 1) PetscFunctionReturn(0);
489   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
490   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
491   if (!levels) {
492     /* Add owned points */
493     ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
494     for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
495     PetscFunctionReturn(0);
496   }
497   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
498   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
499   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
500   /* Handle leaves: shared with the root point */
501   for (l = 0; l < nleaves; ++l) {
502     PetscInt adjSize = PETSC_DETERMINE, a;
503 
504     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
505     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
506   }
507   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
508   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
509   /* Handle roots */
510   for (p = pStart; p < pEnd; ++p) {
511     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
512 
513     if ((p >= sStart) && (p < sEnd)) {
514       /* Some leaves share a root with other leaves on different processes */
515       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
516       if (neighbors) {
517         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
518         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
519         for (n = 0; n < neighbors; ++n) {
520           const PetscInt remoteRank = nrank[noff+n];
521 
522           if (remoteRank == rank) continue;
523           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
524         }
525       }
526     }
527     /* Roots are shared with leaves */
528     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
529     if (!neighbors) continue;
530     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
531     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
532     for (n = 0; n < neighbors; ++n) {
533       const PetscInt remoteRank = rrank[noff+n];
534 
535       if (remoteRank == rank) continue;
536       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
537     }
538   }
539   ierr = PetscFree(adj);CHKERRQ(ierr);
540   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
541   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
542   /* Add additional overlap levels */
543   for (l = 1; l < levels; l++) {
544     /* Propagate point donations over SF to capture remote connections */
545     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
546     /* Add next level of point donations to the label */
547     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
548   }
549   /* We require the closure in the overlap */
550   ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
551   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
552   if (flg) {
553     PetscViewer viewer;
554     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
555     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
556   }
557   /* Invert sender to receiver label */
558   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
559   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
560   /* Add owned points, except for shared local points */
561   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
562   for (l = 0; l < nleaves; ++l) {
563     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
564     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
565   }
566   /* Clean up */
567   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
568   PetscFunctionReturn(0);
569 }
570 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets.
     NOTE(review): strata are iterated as d = 0..dim, which assumes depth == dim
     (an interpolated mesh) -- confirm callers guarantee this. */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* Fetch the depth of each incoming overlap point from its owner */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     depthShift[d] is how far existing points of depth d move to make room for
     received points, keeping the chart grouped by stratum. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  /* depthIdx[d] tracks the next free slot for received points of depth d */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves (shifted to their new stratum position) */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so already-shared points
     keep pointing at their true owners rather than at this rank */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the end of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership transfers to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 /*@
683   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
684 
685   Input Parameters:
686 + dm          - The DM
687 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
688 
689   Output Parameter:
690 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
691 
692   Note:
693   This lexicographically sorts by (depth, cellType)
694 
695   Level: developer
696 
697 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
698 @*/
699 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
700 {
701   MPI_Comm           comm;
702   PetscMPIInt        rank, size;
703   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
704   PetscSFNode       *pointDepths, *remoteDepths;
705   PetscInt          *ilocal;
706   PetscInt          *depthRecv, *depthShift, *depthIdx;
707   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
708   const PetscSFNode *iremote;
709   PetscErrorCode     ierr;
710 
711   PetscFunctionBegin;
712   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
713   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
714   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
715   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
716   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
717   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
718   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRMPI(ierr);
719   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
720   ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
721 
722   /* Before building the migration SF we need to know the new stratum offsets */
723   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
724   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
725   for (d = 0; d < depth+1; ++d) {
726     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
727     for (p = pStart; p < pEnd; ++p) {
728       DMPolytopeType ct;
729 
730       ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
731       pointDepths[p].index = d;
732       pointDepths[p].rank  = ct;
733     }
734   }
735   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
736   ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
737   ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
738   /* Count received points in each stratum and compute the internal strata shift */
739   ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
740   for (p = 0; p < nleaves; ++p) {
741     if (remoteDepths[p].rank < 0) {
742       ++depthRecv[remoteDepths[p].index];
743     } else {
744       ++ctRecv[remoteDepths[p].rank];
745     }
746   }
747   {
748     PetscInt depths[4], dims[4], shift = 0, i, c;
749 
750     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
751          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
752      */
753     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
754     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
755     for (i = 0; i <= depth; ++i) {
756       const PetscInt dep = depths[i];
757       const PetscInt dim = dims[i];
758 
759       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
760         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
761         ctShift[c] = shift;
762         shift     += ctRecv[c];
763       }
764       depthShift[dep] = shift;
765       shift          += depthRecv[dep];
766     }
767     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
768       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
769 
770       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
771         ctShift[c] = shift;
772         shift     += ctRecv[c];
773       }
774     }
775   }
776   /* Derive a new local permutation based on stratified indices */
777   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
778   for (p = 0; p < nleaves; ++p) {
779     const PetscInt       dep = remoteDepths[p].index;
780     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
781 
782     if ((PetscInt) ct < 0) {
783       ilocal[p] = depthShift[dep] + depthIdx[dep];
784       ++depthIdx[dep];
785     } else {
786       ilocal[p] = ctShift[ct] + ctIdx[ct];
787       ++ctIdx[ct];
788     }
789   }
790   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
791   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
792   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
793   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
794   ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
795   ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
796   PetscFunctionReturn(0);
797 }
798 
799 /*@
800   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
801 
802   Collective on dm
803 
804   Input Parameters:
805 + dm - The DMPlex object
806 . pointSF - The PetscSF describing the communication pattern
807 . originalSection - The PetscSection for existing data layout
808 - originalVec - The existing data in a local vector
809 
810   Output Parameters:
+ newSection - The PetscSection describing the new data layout
812 - newVec - The new data in a local vector
813 
814   Level: developer
815 
816 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
817 @*/
818 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
819 {
820   PetscSF        fieldSF;
821   PetscInt      *remoteOffsets, fieldSize;
822   PetscScalar   *originalValues, *newValues;
823   PetscErrorCode ierr;
824 
825   PetscFunctionBegin;
826   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
827   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
828 
829   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
830   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
831   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
832 
833   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
834   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
835   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
836   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
837   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
838   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
839   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
840   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
841   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
842   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 /*@
847   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
848 
849   Collective on dm
850 
851   Input Parameters:
852 + dm - The DMPlex object
853 . pointSF - The PetscSF describing the communication pattern
854 . originalSection - The PetscSection for existing data layout
855 - originalIS - The existing data
856 
857   Output Parameters:
+ newSection - The PetscSection describing the new data layout
859 - newIS - The new data
860 
861   Level: developer
862 
863 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
864 @*/
865 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
866 {
867   PetscSF         fieldSF;
868   PetscInt       *newValues, *remoteOffsets, fieldSize;
869   const PetscInt *originalValues;
870   PetscErrorCode  ierr;
871 
872   PetscFunctionBegin;
873   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
874   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
875 
876   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
877   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
878 
879   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
880   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
881   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
882   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
883   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
884   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
885   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
886   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
887   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
888   PetscFunctionReturn(0);
889 }
890 
891 /*@
892   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
893 
894   Collective on dm
895 
896   Input Parameters:
897 + dm - The DMPlex object
898 . pointSF - The PetscSF describing the communication pattern
899 . originalSection - The PetscSection for existing data layout
900 . datatype - The type of data
901 - originalData - The existing data
902 
903   Output Parameters:
904 + newSection - The PetscSection describing the new data layout
905 - newData - The new data
906 
907   Level: developer
908 
909 .seealso: DMPlexDistribute(), DMPlexDistributeField()
910 @*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  /* Build the new data layout (newSection) and per-point remote offsets for the data SF */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Allocate the receive buffer: storage size in dofs times the size of one datum.
     The buffer is returned in *newData and is not freed here, so the caller owns it. */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRMPI(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  /* Communicate the data dof-by-dof according to the two section layouts */
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
934 
/* Distribute the cone section, cone points, and cone orientations of dm to dmParallel
   according to migrationSF, renumbering the received cone points into the new local
   numbering, then symmetrize and stratify the parallel mesh. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size over the received points */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Map local cone points to global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    /* Cone points are already in the global numbering */
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  /* Convert received global cone points into new local numbering; unmapped points become negative (IS_GTOLM_MASK) */
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    /* In debug mode, verify every cone point was present in the migration */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations need no renumbering, so they are communicated directly */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Copy the adjacency configuration from the serial DM */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1027 
/* Distribute the local coordinates and coordinate section from dm to dmParallel,
   and carry over periodicity settings and the coordinate discretization. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  /* The source DM may have no coordinates at all; then only periodicity and the disc are copied */
  if (originalCoordinates) {
    /* Local coordinates live in a sequential vector, hence PETSC_COMM_SELF */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size of the coordinate vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Copy periodicity information and the coordinate DM discretization */
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmParallel, &cdmParallel);CHKERRQ(ierr);
  ierr = DMCopyDisc(cdm, cdmParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1067 
1068 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1069 {
1070   DM_Plex         *mesh = (DM_Plex*) dm->data;
1071   MPI_Comm         comm;
1072   DMLabel          depthLabel;
1073   PetscMPIInt      rank;
1074   PetscInt         depth, d, numLabels, numLocalLabels, l;
1075   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1076   PetscObjectState depthState = -1;
1077   PetscErrorCode   ierr;
1078 
1079   PetscFunctionBegin;
1080   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1081   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1082 
1083   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1084   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1085   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
1086 
1087   /* If the user has changed the depth label, communicate it instead */
1088   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1089   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1090   if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1091   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1092   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRMPI(ierr);
1093   if (sendDepth) {
1094     ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
1095     ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
1096   }
1097   /* Everyone must have either the same number of labels, or none */
1098   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1099   numLabels = numLocalLabels;
1100   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRMPI(ierr);
1101   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1102   for (l = 0; l < numLabels; ++l) {
1103     DMLabel     label = NULL, labelNew = NULL;
1104     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1105     const char *name = NULL;
1106 
1107     if (hasLabels) {
1108       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1109       /* Skip "depth" because it is recreated */
1110       ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1111       ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1112     }
1113     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRMPI(ierr);
1114     if (isDepth && !sendDepth) continue;
1115     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1116     if (isDepth) {
1117       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1118       PetscInt gdepth;
1119 
1120       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRMPI(ierr);
1121       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1122       for (d = 0; d <= gdepth; ++d) {
1123         PetscBool has;
1124 
1125         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1126         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1127       }
1128     }
1129     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1130     /* Put the output flag in the new label */
1131     if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1132     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRMPI(ierr);
1133     ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1134     ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1135     ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
1136   }
1137   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140 
1141 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1142 {
1143   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1144   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1145   MPI_Comm        comm;
1146   DM              refTree;
1147   PetscSection    origParentSection, newParentSection;
1148   PetscInt        *origParents, *origChildIDs;
1149   PetscBool       flg;
1150   PetscErrorCode  ierr;
1151 
1152   PetscFunctionBegin;
1153   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1154   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1155   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1156 
1157   /* Set up tree */
1158   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1159   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1160   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1161   if (origParentSection) {
1162     PetscInt        pStart, pEnd;
1163     PetscInt        *newParents, *newChildIDs, *globParents;
1164     PetscInt        *remoteOffsetsParents, newParentSize;
1165     PetscSF         parentSF;
1166 
1167     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1168     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1169     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1170     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1171     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1172     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1173     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1174     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1175     if (original) {
1176       PetscInt numParents;
1177 
1178       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1179       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1180       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1181     }
1182     else {
1183       globParents = origParents;
1184     }
1185     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1186     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1187     if (original) {
1188       ierr = PetscFree(globParents);CHKERRQ(ierr);
1189     }
1190     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1191     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1192     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1193     if (PetscDefined(USE_DEBUG)) {
1194       PetscInt  p;
1195       PetscBool valid = PETSC_TRUE;
1196       for (p = 0; p < newParentSize; ++p) {
1197         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1198       }
1199       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1200     }
1201     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1202     if (flg) {
1203       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1204       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1205       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1206       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1207       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1208     }
1209     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1210     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1211     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1212     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1213   }
1214   pmesh->useAnchors = mesh->useAnchors;
1215   PetscFunctionReturn(0);
1216 }
1217 
/* Build the point SF (shared-point ownership) of dmParallel from the migration SF.
   Ownership is decided by an MPI_MAXLOC reduction over candidate (rank, index) pairs;
   leaves owned by another rank become ghost points in the resulting SF. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Start with no owner recorded for any root */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Reduce all bids; the highest (rank, index) pair wins ownership of each root */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    /* Broadcast the winning owner back to all leaves */
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points whose owner is another rank become ghost points of the new point SF */
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Copy the adjacency configuration from the serial DM */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1294 
1295 /*@
1296   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1297 
1298   Input Parameters:
1299 + dm - The DMPlex object
1300 - flg - Balance the partition?
1301 
1302   Level: intermediate
1303 
1304 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1305 @*/
1306 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1307 {
1308   DM_Plex *mesh = (DM_Plex *)dm->data;
1309 
1310   PetscFunctionBegin;
1311   mesh->partitionBalance = flg;
1312   PetscFunctionReturn(0);
1313 }
1314 
1315 /*@
1316   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1317 
1318   Input Parameter:
1319 . dm - The DMPlex object
1320 
1321   Output Parameter:
1322 . flg - Balance the partition?
1323 
1324   Level: intermediate
1325 
1326 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1327 @*/
1328 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1329 {
1330   DM_Plex *mesh = (DM_Plex *)dm->data;
1331 
1332   PetscFunctionBegin;
1333   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1334   PetscValidBoolPointer(flg, 2);
1335   *flg = mesh->partitionBalance;
1336   PetscFunctionReturn(0);
1337 }
1338 
/* Triple used in the ownership vote: vote is the reduction key,
   while rank/index identify the candidate owner carried alongside it */
typedef struct {
  PetscInt vote, rank, index;
} Petsc3Int;
1342 
1343 /* MaxLoc, but carry a third piece of information around */
1344 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1345 {
1346   Petsc3Int *a = (Petsc3Int *)inout_;
1347   Petsc3Int *b = (Petsc3Int *)in_;
1348   PetscInt i, len = *len_;
1349   for (i = 0; i < len; i++) {
1350     if (a[i].vote < b[i].vote) {
1351       a[i].vote = b[i].vote;
1352       a[i].rank = b[i].rank;
1353       a[i].index = b[i].index;
1354     } else if (a[i].vote <= b[i].vote) {
1355       if (a[i].rank >= b[i].rank) {
1356         a[i].rank = b[i].rank;
1357         a[i].index = b[i].index;
1358       }
1359     }
1360   }
1361 }
1362 
1363 /*@C
1364   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1365 
1366   Input Parameters:
1367 + dm          - The source DMPlex object
1368 . migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1373 
1374   Notes:
1375   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1376 
1377   Level: developer
1378 
1379 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1380 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                    /* random-shift table; only created when ownership && balance */
  const PetscInt     numShifts = 13759;         /* size of the shift table, indexed cyclically by point number */
  const PetscScalar *shift = NULL;              /* non-NULL exactly while the shift table is in use */
  const PetscBool    shiftDebug = PETSC_FALSE;  /* compile-time switch: trace every ownership vote */
  PetscBool          balance;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
  ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);

  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
      ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
      ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
      ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
      ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
      ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
      ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        /* NOTE(review): the first shift[] access in this print is not guarded by (shift ? ...);
           enabling shiftDebug without balance would dereference NULL -- confirm before flipping the switch */
        ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
      }
      /* Either put in a bid or we know we own it */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce the leaf votes onto the roots with the custom MaxLocCarry operation */
    ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRMPI(ierr);
    ierr = MPI_Type_commit(&datatype);CHKERRMPI(ierr);
    ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRMPI(ierr);
    ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = MPI_Op_free(&op);CHKERRMPI(ierr);
    ierr = MPI_Type_free(&datatype);CHKERRMPI(ierr);
    /* The winning (rank, index) pair becomes the owner recorded at each root */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    ierr = PetscFree(leafVote);CHKERRQ(ierr);
    ierr = PetscFree(rootVote);CHKERRQ(ierr);
  } else {
    /* No vote: the sending rank keeps ownership of every root point */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Tell every leaf which (rank, index) now owns its point */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);

  /* Leaves owned by another rank become the leaves of the new point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
    ierr = VecDestroy(&shifts);CHKERRQ(ierr);
  }
  if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  /* The SF takes ownership of pointLocal and pointRemote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1494 
1495 /*@C
1496   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1497 
1498   Collective on dm
1499 
  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object
1506 
1507   Level: intermediate
1508 
1509 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1510 @*/
1511 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1512 {
1513   MPI_Comm               comm;
1514   PetscInt               dim, cdim, nroots;
1515   PetscSF                sfPoint;
1516   ISLocalToGlobalMapping ltogMigration;
1517   ISLocalToGlobalMapping ltogOriginal = NULL;
1518   PetscBool              flg;
1519   PetscErrorCode         ierr;
1520 
1521   PetscFunctionBegin;
1522   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1523   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1524   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1525   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1526   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1527   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1528   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1529 
1530   /* Check for a one-to-all distribution pattern */
1531   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1532   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1533   if (nroots >= 0) {
1534     IS        isOriginal;
1535     PetscInt  n, size, nleaves;
1536     PetscInt  *numbering_orig, *numbering_new;
1537 
1538     /* Get the original point numbering */
1539     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1540     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1541     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1542     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1543     /* Convert to positive global numbers */
1544     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1545     /* Derive the new local-to-global mapping from the old one */
1546     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1547     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1548     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1549     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1550     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1551     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1552     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1553   } else {
1554     /* One-to-all distribution pattern: We can derive LToG from SF */
1555     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1556   }
1557   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1558   if (flg) {
1559     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1560     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1561   }
1562   /* Migrate DM data to target DM */
1563   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1564   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1565   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1566   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1567   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1568   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1569   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 /*@C
1574   DMPlexDistribute - Distributes the mesh and any associated sections.
1575 
1576   Collective on dm
1577 
1578   Input Parameters:
1579 + dm  - The original DMPlex object
1580 - overlap - The overlap of partitions, 0 is the default
1581 
1582   Output Parameters:
1583 + sf - The PetscSF used for point distribution, or NULL if not needed
1584 - dmParallel - The distributed DMPlex object
1585 
1586   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1587 
1588   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1589   representation on the mesh.
1590 
1591   Level: intermediate
1592 
1593 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
1594 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;        /* partitioner output: cells per target rank */
  PetscSection           cellPartSection; /* partitioner output: layout of cellPart */
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  /* Nothing to distribute on a single process: outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Collect the set of target ranks that actually receive points */
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure() */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Each rank's stratum is the transitive closure of its assigned cells */
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);

  /* Invert "rank -> points sent" into "rank -> points received" and turn it into a migration SF */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  /* PETSC_TRUE: shared-point ownership is decided by the voting reduction */
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    /* oldRemote aliases permRemote here; the const qualifier is cast away by PetscFree */
    if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy discretization */
  ierr = DMCopyDisc(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1753 
1754 /*@C
1755   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1756 
1757   Collective on dm
1758 
1759   Input Parameters:
1760 + dm  - The non-overlapping distributed DMPlex object
1761 - overlap - The overlap of partitions (the same on all ranks)
1762 
1763   Output Parameters:
1764 + sf - The PetscSF used for point distribution
1765 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1766 
1767   Notes:
1768   If the mesh was not distributed, the return value is NULL.
1769 
1770   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1771   representation on the mesh.
1772 
1773   Level: advanced
1774 
1775 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
1776 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  /* Serial mesh: no overlap to add, outputs stay NULL by contract */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Store the overlap in the new DM: accumulates on top of any existing overlap */
  ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
  /* Build the new point SF; PETSC_FALSE: keep existing ownership, no vote */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  /* Ownership of sfOverlap transfers to the caller when sf is requested */
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1841 
1842 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1843 {
1844   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1845 
1846   PetscFunctionBegin;
1847   *overlap = mesh->overlap;
1848   PetscFunctionReturn(0);
1849 }
1850 
1851 /*@
1852   DMPlexGetOverlap - Get the DMPlex partition overlap.
1853 
1854   Not collective
1855 
1856   Input Parameter:
1857 . dm - The DM
1858 
1859   Output Parameter:
1860 . overlap - The overlap of this DM
1861 
1862   Level: intermediate
1863 
1864 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
1865 @*/
1866 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
1867 {
1868   PetscErrorCode     ierr;
1869 
1870   PetscFunctionBegin;
1871   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1872   ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
1873   PetscFunctionReturn(0);
1874 }
1875 
1876 /*@C
1877   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1878   root process of the original's communicator.
1879 
1880   Collective on dm
1881 
1882   Input Parameter:
1883 . dm - the original DMPlex object
1884 
1885   Output Parameters:
1886 + sf - the PetscSF used for point distribution (optional)
1887 - gatherMesh - the gathered DM object, or NULL
1888 
1889   Level: intermediate
1890 
1891 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1892 @*/
1893 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1894 {
1895   MPI_Comm       comm;
1896   PetscMPIInt    size;
1897   PetscPartitioner oldPart, gatherPart;
1898   PetscErrorCode ierr;
1899 
1900   PetscFunctionBegin;
1901   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1902   PetscValidPointer(gatherMesh,3);
1903   *gatherMesh = NULL;
1904   if (sf) *sf = NULL;
1905   comm = PetscObjectComm((PetscObject)dm);
1906   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
1907   if (size == 1) PetscFunctionReturn(0);
1908   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1909   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1910   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1911   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1912   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1913   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1914 
1915   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1916   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1917   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1918   PetscFunctionReturn(0);
1919 }
1920 
1921 /*@C
1922   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1923 
1924   Collective on dm
1925 
1926   Input Parameter:
1927 . dm - the original DMPlex object
1928 
1929   Output Parameters:
1930 + sf - the PetscSF used for point distribution (optional)
1931 - redundantMesh - the redundant DM object, or NULL
1932 
1933   Level: intermediate
1934 
1935 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1936 @*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint, gatherSF;
  DM             gatherDM, dmCoord;
  PetscSFNode    *points;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(redundantMesh,3);
  *redundantMesh = NULL;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  if (size == 1) {
    /* A serial mesh is already redundant: return the input DM itself (referenced) */
    ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
    *redundantMesh = dm;
    if (sf) *sf = NULL;
    PetscFunctionReturn(0);
  }
  /* Step 1: gather the whole mesh onto rank 0 */
  ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
  if (!gatherDM) PetscFunctionReturn(0);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
  numPoints = pEnd - pStart;
  /* Only rank 0 holds the gathered mesh; broadcast its point count to all ranks */
  ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRMPI(ierr);
  ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
  /* Step 2: build a migration SF in which every rank pulls every point from rank 0 */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
  /* PETSC_FALSE: no ownership vote, the copies simply mirror rank 0 */
  ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  if (sf) {
    PetscSF tsf;

    /* Compose gather + replication so the returned SF maps original points to redundant points */
    ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
    ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
  ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1993 
1994 /*@
1995   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
1996 
1997   Collective
1998 
1999   Input Parameter:
2000 . dm      - The DM object
2001 
2002   Output Parameter:
2003 . distributed - Flag whether the DM is distributed
2004 
2005   Level: intermediate
2006 
2007   Notes:
2008   This currently finds out whether at least two ranks have any DAG points.
2009   This involves MPI_Allreduce() with one integer.
2010   The result is currently not stashed so every call to this routine involves this global communication.
2011 
2012 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2013 @*/
2014 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2015 {
2016   PetscInt          pStart, pEnd, count;
2017   MPI_Comm          comm;
2018   PetscErrorCode    ierr;
2019 
2020   PetscFunctionBegin;
2021   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2022   PetscValidPointer(distributed,2);
2023   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2024   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2025   count = !!(pEnd - pStart);
2026   ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRMPI(ierr);
2027   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2028   PetscFunctionReturn(0);
2029 }
2030