xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 7301b17255eb0fdd9851e1db33b25f523d0eaea0)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidBoolPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* DMPlexGetAdjacency_Internal - Driver for adjacency computation: dispatches to the user
   callback or one of the cone/support/transitive strategies, optionally allocating the output
   array and optionally replacing constrained (anchored) points by their anchors.

   NOTE(review): asiz is a function-level static, so if *adj is passed in non-NULL together
   with *adjSize < 0, the size used is whatever a PREVIOUS call computed (and 0 on the very
   first such call). Looks intentional as a cache but is not thread-safe — confirm. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;        /* cached worst-case adjacency size from the last allocation */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1; /* chart of the anchor section; points outside have no anchors */
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    /* Allocate an upper bound on the adjacency size: a geometric series in the maximum
       cone/support sizes over the depth of the DAG, scaled by the anchor fan-out, and
       clamped to the total number of points in the chart. */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    depth = PetscMax(depth, -depth);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  /* A negative *adjSize (e.g. PETSC_DETERMINE) means "use the computed/cached bound" */
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: user callback overrides everything, otherwise pick the strategy from the flags */
  if (mesh->useradjacency) {
    ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points: remove the point
       (shifting the tail left), then append its anchors if not already present. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the remainder down one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Append anchors[aOff+s] if absent; the comma-operator idiom appends when the scan
             reaches the end without a match. NOTE(review): the append happens before the
             capacity check below, same pattern as the strategy helpers — verify bound. */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        /* Unconstrained point: keep it and advance */
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
268 
269 /*@
270   DMPlexGetAdjacency - Return all points adjacent to the given point
271 
272   Input Parameters:
273 + dm - The DM object
274 - p  - The point
275 
276   Input/Output Parameters:
277 + adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE;
278             on output the number of adjacent points
279 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize;
280         on output contains the adjacent points
281 
282   Level: advanced
283 
284   Notes:
285     The user must PetscFree the adj array if it was not passed in.
286 
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291   PetscBool      useCone, useClosure, useAnchors;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscValidPointer(adjSize,3);
297   PetscValidPointer(adj,4);
298   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 /*@
305   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
306 
307   Collective on dm
308 
309   Input Parameters:
310 + dm      - The DM
311 . sfPoint - The PetscSF which encodes point connectivity
312 . rootRankSection -
313 . rootRanks -
314 . leftRankSection -
315 - leafRanks -
316 
317   Output Parameters:
318 + processRanks - A list of process neighbors, or NULL
319 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
320 
321   Level: developer
322 
323 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
324 @*/
325 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
326 {
327   const PetscSFNode *remotePoints;
328   PetscInt          *localPointsNew;
329   PetscSFNode       *remotePointsNew;
330   const PetscInt    *nranks;
331   PetscInt          *ranksNew;
332   PetscBT            neighbors;
333   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
334   PetscMPIInt        size, proc, rank;
335   PetscErrorCode     ierr;
336 
337   PetscFunctionBegin;
338   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
339   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
340   if (processRanks) {PetscValidPointer(processRanks, 7);}
341   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
342   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
343   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
344   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
345   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
346   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
347   /* Compute root-to-leaf process connectivity */
348   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
349   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
350   for (p = pStart; p < pEnd; ++p) {
351     PetscInt ndof, noff, n;
352 
353     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
354     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
355     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
356   }
357   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
358   /* Compute leaf-to-neighbor process connectivity */
359   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
360   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
361   for (p = pStart; p < pEnd; ++p) {
362     PetscInt ndof, noff, n;
363 
364     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
365     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
366     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
367   }
368   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
369   /* Compute leaf-to-root process connectivity */
370   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
371   /* Calculate edges */
372   PetscBTClear(neighbors, rank);
373   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
374   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
375   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
376   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
377   for (proc = 0, n = 0; proc < size; ++proc) {
378     if (PetscBTLookup(neighbors, proc)) {
379       ranksNew[n]              = proc;
380       localPointsNew[n]        = proc;
381       remotePointsNew[n].index = rank;
382       remotePointsNew[n].rank  = proc;
383       ++n;
384     }
385   }
386   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
387   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
388   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
389   if (sfProcess) {
390     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
391     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
392     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
393     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
394   }
395   PetscFunctionReturn(0);
396 }
397 
398 /*@
399   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
400 
401   Collective on dm
402 
403   Input Parameter:
404 . dm - The DM
405 
406   Output Parameters:
407 + rootSection - The number of leaves for a given root point
408 . rootrank    - The rank of each edge into the root point
409 . leafSection - The number of processes sharing a given leaf point
410 - leafrank    - The rank of each process sharing a leaf point
411 
412   Level: developer
413 
414 .seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
415 @*/
416 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
417 {
418   MPI_Comm        comm;
419   PetscSF         sfPoint;
420   const PetscInt *rootdegree;
421   PetscInt       *myrank, *remoterank;
422   PetscInt        pStart, pEnd, p, nedges;
423   PetscMPIInt     rank;
424   PetscErrorCode  ierr;
425 
426   PetscFunctionBegin;
427   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
428   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
429   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
430   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
431   /* Compute number of leaves for each root */
432   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
433   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
434   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
435   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
436   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
437   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
438   /* Gather rank of each leaf to root */
439   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
440   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
441   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
442   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
443   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
444   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
445   ierr = PetscFree(myrank);CHKERRQ(ierr);
446   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
447   /* Distribute remote ranks to leaves */
448   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
449   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
450   PetscFunctionReturn(0);
451 }
452 
453 /*@C
454   DMPlexCreateOverlapLabel - Compute owner information for shared points. This basically gets two-sided for an SF.
455 
456   Collective on dm
457 
458   Input Parameters:
459 + dm          - The DM
460 . levels      - Number of overlap levels
461 . rootSection - The number of leaves for a given root point
462 . rootrank    - The rank of each edge into the root point
463 . leafSection - The number of processes sharing a given leaf point
464 - leafrank    - The rank of each process sharing a leaf point
465 
466   Output Parameter:
467 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
468 
469   Level: developer
470 
471 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
472 @*/
473 PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
474 {
475   MPI_Comm           comm;
476   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
477   PetscSF            sfPoint;
478   const PetscSFNode *remote;
479   const PetscInt    *local;
480   const PetscInt    *nrank, *rrank;
481   PetscInt          *adj = NULL;
482   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
483   PetscMPIInt        rank, size;
484   PetscBool          flg;
485   PetscErrorCode     ierr;
486 
487   PetscFunctionBegin;
488   *ovLabel = NULL;
489   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
490   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
491   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
492   if (size == 1) PetscFunctionReturn(0);
493   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
494   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
495   if (!levels) {
496     /* Add owned points */
497     ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
498     for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
499     PetscFunctionReturn(0);
500   }
501   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
502   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
503   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
504   /* Handle leaves: shared with the root point */
505   for (l = 0; l < nleaves; ++l) {
506     PetscInt adjSize = PETSC_DETERMINE, a;
507 
508     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
509     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
510   }
511   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
512   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
513   /* Handle roots */
514   for (p = pStart; p < pEnd; ++p) {
515     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
516 
517     if ((p >= sStart) && (p < sEnd)) {
518       /* Some leaves share a root with other leaves on different processes */
519       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
520       if (neighbors) {
521         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
522         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523         for (n = 0; n < neighbors; ++n) {
524           const PetscInt remoteRank = nrank[noff+n];
525 
526           if (remoteRank == rank) continue;
527           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528         }
529       }
530     }
531     /* Roots are shared with leaves */
532     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
533     if (!neighbors) continue;
534     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
535     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
536     for (n = 0; n < neighbors; ++n) {
537       const PetscInt remoteRank = rrank[noff+n];
538 
539       if (remoteRank == rank) continue;
540       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
541     }
542   }
543   ierr = PetscFree(adj);CHKERRQ(ierr);
544   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
545   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
546   /* Add additional overlap levels */
547   for (l = 1; l < levels; l++) {
548     /* Propagate point donations over SF to capture remote connections */
549     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
550     /* Add next level of point donations to the label */
551     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
552   }
553   /* We require the closure in the overlap */
554   ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
555   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
556   if (flg) {
557     PetscViewer viewer;
558     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
559     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
560   }
561   /* Invert sender to receiver label */
562   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
563   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
564   /* Add owned points, except for shared local points */
565   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
566   for (l = 0; l < nleaves; ++l) {
567     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
568     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
569   }
570   /* Clean up */
571   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
572   PetscFunctionReturn(0);
573 }
574 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     record the depth of every local point and broadcast the depth of each incoming leaf */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): the shift arithmetic below appears to interleave incoming points after the
     existing points of each stratum in the order cells, vertices, faces, edges — confirm
     against the Plex stratum numbering before modifying. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the next free index in stratum d of the NEW chart; it advances as
       points are placed below */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves (shifted into their new stratum positions) */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so they keep their remote owner */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended after the local points of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* The SF takes ownership of ilocal and iremote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
685 
686 /*@
687   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
688 
689   Input Parameters:
690 + dm          - The DM
691 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
692 
693   Output Parameter:
694 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
695 
696   Note:
697   This lexicographically sorts by (depth, cellType)
698 
699   Level: developer
700 
701 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
702 @*/
703 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
704 {
705   MPI_Comm           comm;
706   PetscMPIInt        rank, size;
707   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
708   PetscSFNode       *pointDepths, *remoteDepths;
709   PetscInt          *ilocal;
710   PetscInt          *depthRecv, *depthShift, *depthIdx;
711   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
712   const PetscSFNode *iremote;
713   PetscErrorCode     ierr;
714 
715   PetscFunctionBegin;
716   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
717   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
718   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
719   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
720   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
721   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
722   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRMPI(ierr);
723   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
724   ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
725 
726   /* Before building the migration SF we need to know the new stratum offsets */
727   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
728   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
729   for (d = 0; d < depth+1; ++d) {
730     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
731     for (p = pStart; p < pEnd; ++p) {
732       DMPolytopeType ct;
733 
734       ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
735       pointDepths[p].index = d;
736       pointDepths[p].rank  = ct;
737     }
738   }
739   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
740   ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
741   ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
742   /* Count received points in each stratum and compute the internal strata shift */
743   ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
744   for (p = 0; p < nleaves; ++p) {
745     if (remoteDepths[p].rank < 0) {
746       ++depthRecv[remoteDepths[p].index];
747     } else {
748       ++ctRecv[remoteDepths[p].rank];
749     }
750   }
751   {
752     PetscInt depths[4], dims[4], shift = 0, i, c;
753 
754     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
755          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
756      */
757     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
758     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
759     for (i = 0; i <= depth; ++i) {
760       const PetscInt dep = depths[i];
761       const PetscInt dim = dims[i];
762 
763       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
764         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
765         ctShift[c] = shift;
766         shift     += ctRecv[c];
767       }
768       depthShift[dep] = shift;
769       shift          += depthRecv[dep];
770     }
771     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
772       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
773 
774       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
775         ctShift[c] = shift;
776         shift     += ctRecv[c];
777       }
778     }
779   }
780   /* Derive a new local permutation based on stratified indices */
781   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
782   for (p = 0; p < nleaves; ++p) {
783     const PetscInt       dep = remoteDepths[p].index;
784     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
785 
786     if ((PetscInt) ct < 0) {
787       ilocal[p] = depthShift[dep] + depthIdx[dep];
788       ++depthIdx[dep];
789     } else {
790       ilocal[p] = ctShift[ct] + ctIdx[ct];
791       ++ctIdx[ct];
792     }
793   }
794   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
795   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
796   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
797   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
798   ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
799   ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
800   PetscFunctionReturn(0);
801 }
802 
803 /*@
804   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
805 
806   Collective on dm
807 
808   Input Parameters:
809 + dm - The DMPlex object
810 . pointSF - The PetscSF describing the communication pattern
811 . originalSection - The PetscSection for existing data layout
812 - originalVec - The existing data in a local vector
813 
814   Output Parameters:
815 + newSection - The PetscSF describing the new data layout
816 - newVec - The new data in a local vector
817 
818   Level: developer
819 
820 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
821 @*/
822 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
823 {
824   PetscSF        fieldSF;
825   PetscInt      *remoteOffsets, fieldSize;
826   PetscScalar   *originalValues, *newValues;
827   PetscErrorCode ierr;
828 
829   PetscFunctionBegin;
830   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
831   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
832 
833   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
834   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
835   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
836 
837   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
838   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
839   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
840   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
841   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
842   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
843   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
844   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
845   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
846   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
847   PetscFunctionReturn(0);
848 }
849 
850 /*@
851   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
852 
853   Collective on dm
854 
855   Input Parameters:
856 + dm - The DMPlex object
857 . pointSF - The PetscSF describing the communication pattern
858 . originalSection - The PetscSection for existing data layout
859 - originalIS - The existing data
860 
861   Output Parameters:
862 + newSection - The PetscSF describing the new data layout
863 - newIS - The new data
864 
865   Level: developer
866 
867 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
868 @*/
869 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
870 {
871   PetscSF         fieldSF;
872   PetscInt       *newValues, *remoteOffsets, fieldSize;
873   const PetscInt *originalValues;
874   PetscErrorCode  ierr;
875 
876   PetscFunctionBegin;
877   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
878   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
879 
880   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
881   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
882 
883   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
884   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
885   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
886   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
887   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
888   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
889   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
890   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
891   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
892   PetscFunctionReturn(0);
893 }
894 
895 /*@
896   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
897 
898   Collective on dm
899 
900   Input Parameters:
901 + dm - The DMPlex object
902 . pointSF - The PetscSF describing the communication pattern
903 . originalSection - The PetscSection for existing data layout
904 . datatype - The type of data
905 - originalData - The existing data
906 
907   Output Parameters:
908 + newSection - The PetscSection describing the new data layout
909 - newData - The new data
910 
911   Level: developer
912 
913 .seealso: DMPlexDistribute(), DMPlexDistributeField()
914 @*/
915 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
916 {
917   PetscSF        fieldSF;
918   PetscInt      *remoteOffsets, fieldSize;
919   PetscMPIInt    dataSize;
920   PetscErrorCode ierr;
921 
922   PetscFunctionBegin;
923   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
924   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
925 
926   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
927   ierr = MPI_Type_size(datatype, &dataSize);CHKERRMPI(ierr);
928   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
929 
930   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
931   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
932   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
933   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
934   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
935   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
936   PetscFunctionReturn(0);
937 }
938 
/* Distribute the cone section and cone/orientation data of dm into dmParallel through
   migrationSF, renumber the received cone points into the new local numbering, and then
   symmetrize and stratify the parallel mesh.
   - original:    local-to-global map for the source points; if NULL, cone entries are
                  assumed to already be global block indices
   - renumbering: global-to-local map used to translate received cones into dmParallel's
                  local point numbers */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size on the parallel mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate local cone points into global numbers before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  /* Map received global cone points back to the new local numbering (in place);
     IS_GTOLM_MASK leaves -1 for points that are not in the map */
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    /* In debug mode, verify every cone point was found in the renumbering */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations are rank-local data and need no renumbering */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Carry the adjacency configuration over to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1031 
/* Distribute local coordinates (section and vector) from dm into dmParallel through
   migrationSF, and carry over periodicity and the coordinate DM discretization. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  DM               cdm, cdmParallel;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Coordinates are a local (sequential) vector, hence PETSC_COMM_SELF */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size (spatial dimension) on the distributed vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Copy periodicity information and the coordinate discretization */
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmParallel, &cdmParallel);CHKERRQ(ierr);
  ierr = DMCopyDisc(cdm, cdmParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1071 
1072 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1073 {
1074   DM_Plex         *mesh = (DM_Plex*) dm->data;
1075   MPI_Comm         comm;
1076   DMLabel          depthLabel;
1077   PetscMPIInt      rank;
1078   PetscInt         depth, d, numLabels, numLocalLabels, l;
1079   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1080   PetscObjectState depthState = -1;
1081   PetscErrorCode   ierr;
1082 
1083   PetscFunctionBegin;
1084   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1085   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1086 
1087   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1088   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1089   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
1090 
1091   /* If the user has changed the depth label, communicate it instead */
1092   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1093   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1094   if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1095   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1096   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRMPI(ierr);
1097   if (sendDepth) {
1098     ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
1099     ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
1100   }
1101   /* Everyone must have either the same number of labels, or none */
1102   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1103   numLabels = numLocalLabels;
1104   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRMPI(ierr);
1105   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1106   for (l = 0; l < numLabels; ++l) {
1107     DMLabel     label = NULL, labelNew = NULL;
1108     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1109     const char *name = NULL;
1110 
1111     if (hasLabels) {
1112       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1113       /* Skip "depth" because it is recreated */
1114       ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1115       ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1116     }
1117     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRMPI(ierr);
1118     if (isDepth && !sendDepth) continue;
1119     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1120     if (isDepth) {
1121       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1122       PetscInt gdepth;
1123 
1124       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRMPI(ierr);
1125       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1126       for (d = 0; d <= gdepth; ++d) {
1127         PetscBool has;
1128 
1129         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1130         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1131       }
1132     }
1133     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1134     /* Put the output flag in the new label */
1135     if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1136     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRMPI(ierr);
1137     ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1138     ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1139     ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
1140   }
1141   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1146 {
1147   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1148   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1149   MPI_Comm        comm;
1150   DM              refTree;
1151   PetscSection    origParentSection, newParentSection;
1152   PetscInt        *origParents, *origChildIDs;
1153   PetscBool       flg;
1154   PetscErrorCode  ierr;
1155 
1156   PetscFunctionBegin;
1157   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1158   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1159   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1160 
1161   /* Set up tree */
1162   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1163   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1164   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1165   if (origParentSection) {
1166     PetscInt        pStart, pEnd;
1167     PetscInt        *newParents, *newChildIDs, *globParents;
1168     PetscInt        *remoteOffsetsParents, newParentSize;
1169     PetscSF         parentSF;
1170 
1171     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1172     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1173     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1174     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1175     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1176     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1177     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1178     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1179     if (original) {
1180       PetscInt numParents;
1181 
1182       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1183       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1184       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1185     }
1186     else {
1187       globParents = origParents;
1188     }
1189     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1190     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1191     if (original) {
1192       ierr = PetscFree(globParents);CHKERRQ(ierr);
1193     }
1194     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1195     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1196     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1197     if (PetscDefined(USE_DEBUG)) {
1198       PetscInt  p;
1199       PetscBool valid = PETSC_TRUE;
1200       for (p = 0; p < newParentSize; ++p) {
1201         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1202       }
1203       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1204     }
1205     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1206     if (flg) {
1207       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1208       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1209       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1210       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1211       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1212     }
1213     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1214     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1215     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1216     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1217   }
1218   pmesh->useAnchors = mesh->useAnchors;
1219   PetscFunctionReturn(0);
1220 }
1221 
/* Build the point SF of dmParallel from migrationSF by an ownership vote: every leaf bids
   for the points it received, a MAXLOC reduction picks one owner per root, and the winners
   are broadcast back so each rank can list its ghost (non-owned) points. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Initialize all root owners to "unclaimed" and push that state to the leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve ownership by MAXLOC, then broadcast the winners back to the leaves */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    /* Every point not owned locally becomes a ghost point in the new point SF */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Carry the adjacency configuration over to the parallel mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1298 
1299 /*@
1300   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1301 
1302   Input Parameters:
1303 + dm - The DMPlex object
1304 - flg - Balance the partition?
1305 
1306   Level: intermediate
1307 
1308 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1309 @*/
1310 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1311 {
1312   DM_Plex *mesh = (DM_Plex *)dm->data;
1313 
1314   PetscFunctionBegin;
1315   mesh->partitionBalance = flg;
1316   PetscFunctionReturn(0);
1317 }
1318 
1319 /*@
1320   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1321 
1322   Input Parameter:
1323 . dm - The DMPlex object
1324 
1325   Output Parameter:
1326 . flg - Balance the partition?
1327 
1328   Level: intermediate
1329 
1330 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1331 @*/
1332 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1333 {
1334   DM_Plex *mesh = (DM_Plex *)dm->data;
1335 
1336   PetscFunctionBegin;
1337   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1338   PetscValidBoolPointer(flg, 2);
1339   *flg = mesh->partitionBalance;
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 typedef struct {
1344   PetscInt vote, rank, index;
1345 } Petsc3Int;
1346 
1347 /* MaxLoc, but carry a third piece of information around */
1348 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1349 {
1350   Petsc3Int *a = (Petsc3Int *)inout_;
1351   Petsc3Int *b = (Petsc3Int *)in_;
1352   PetscInt i, len = *len_;
1353   for (i = 0; i < len; i++) {
1354     if (a[i].vote < b[i].vote) {
1355       a[i].vote = b[i].vote;
1356       a[i].rank = b[i].rank;
1357       a[i].index = b[i].index;
1358     } else if (a[i].vote <= b[i].vote) {
1359       if (a[i].rank >= b[i].rank) {
1360         a[i].rank = b[i].rank;
1361         a[i].index = b[i].index;
1362       }
1363     }
1364   }
1365 }
1366 
1367 /*@C
1368   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1369 
1370   Input Parameters:
1371 + dm          - The source DMPlex object
1372 . migrationSF - The star forest that describes the parallel point remapping
1373 - ownership   - Flag causing a vote to determine point ownership
1374 
1375   Output Parameter:
1376 . pointSF     - The star forest describing the point overlap in the remapped DM
1377 
1378   Notes:
1379   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1380 
1381   Level: developer
1382 
1383 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1384 @*/
1385 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1386 {
1387   PetscMPIInt        rank, size;
1388   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1389   PetscInt          *pointLocal;
1390   const PetscInt    *leaves;
1391   const PetscSFNode *roots;
1392   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1393   Vec                shifts;
1394   const PetscInt     numShifts = 13759;
1395   const PetscScalar *shift = NULL;
1396   const PetscBool    shiftDebug = PETSC_FALSE;
1397   PetscBool          balance;
1398   PetscErrorCode     ierr;
1399 
1400   PetscFunctionBegin;
1401   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1402   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
1403   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
1404   ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1405 
1406   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1407   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1408   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1409   if (ownership) {
1410     MPI_Op       op;
1411     MPI_Datatype datatype;
1412     Petsc3Int   *rootVote = NULL, *leafVote = NULL;
1413     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1414     if (balance) {
1415       PetscRandom r;
1416 
1417       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1418       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1419       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1420       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1421       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1422       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1423       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1424       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1425     }
1426 
1427     ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
1428     ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
1429     /* Point ownership vote: Process with highest rank owns shared points */
1430     for (p = 0; p < nleaves; ++p) {
1431       if (shiftDebug) {
1432         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1433       }
1434       /* Either put in a bid or we know we own it */
1435       leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1436       leafVote[p].rank = rank;
1437       leafVote[p].index = p;
1438     }
1439     for (p = 0; p < nroots; p++) {
1440       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1441       rootVote[p].vote  = -3;
1442       rootVote[p].rank  = -3;
1443       rootVote[p].index = -3;
1444     }
1445     ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRMPI(ierr);
1446     ierr = MPI_Type_commit(&datatype);CHKERRMPI(ierr);
1447     ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRMPI(ierr);
1448     ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1449     ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
1450     ierr = MPI_Op_free(&op);CHKERRMPI(ierr);
1451     ierr = MPI_Type_free(&datatype);CHKERRMPI(ierr);
1452     for (p = 0; p < nroots; p++) {
1453       rootNodes[p].rank = rootVote[p].rank;
1454       rootNodes[p].index = rootVote[p].index;
1455     }
1456     ierr = PetscFree(leafVote);CHKERRQ(ierr);
1457     ierr = PetscFree(rootVote);CHKERRQ(ierr);
1458   } else {
1459     for (p = 0; p < nroots; p++) {
1460       rootNodes[p].index = -1;
1461       rootNodes[p].rank = rank;
1462     }
1463     for (p = 0; p < nleaves; p++) {
1464       /* Write new local id into old location */
1465       if (roots[p].rank == rank) {
1466         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1467       }
1468     }
1469   }
1470   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);
1471   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);
1472 
1473   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1474     if (leafNodes[p].rank != rank) npointLeaves++;
1475   }
1476   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1477   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1478   for (idx = 0, p = 0; p < nleaves; p++) {
1479     if (leafNodes[p].rank != rank) {
1480       /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1481       pointLocal[idx] = p;
1482       pointRemote[idx] = leafNodes[p];
1483       idx++;
1484     }
1485   }
1486   if (shift) {
1487     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1488     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1489   }
1490   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1491   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1492   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1493   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1494   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1495   ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 /*@C
1500   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1501 
1502   Collective on dm
1503 
1504   Input Parameters:
1505 + dm       - The source DMPlex object
1506 - sf       - The star forest communication context describing the migration pattern
1507 
1508   Output Parameter:
1509 . targetDM - The target DMPlex object
1510 
1511   Level: intermediate
1512 
1513 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1514 @*/
1515 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1516 {
1517   MPI_Comm               comm;
1518   PetscInt               dim, cdim, nroots;
1519   PetscSF                sfPoint;
1520   ISLocalToGlobalMapping ltogMigration;
1521   ISLocalToGlobalMapping ltogOriginal = NULL;
1522   PetscBool              flg;
1523   PetscErrorCode         ierr;
1524 
1525   PetscFunctionBegin;
1526   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1527   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1528   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1529   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1530   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1531   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1532   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1533 
1534   /* Check for a one-to-all distribution pattern */
1535   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1536   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1537   if (nroots >= 0) {
1538     IS        isOriginal;
1539     PetscInt  n, size, nleaves;
1540     PetscInt  *numbering_orig, *numbering_new;
1541 
1542     /* Get the original point numbering */
1543     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1544     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1545     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1546     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1547     /* Convert to positive global numbers */
1548     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1549     /* Derive the new local-to-global mapping from the old one */
1550     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1551     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1552     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1553     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1554     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1555     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1556     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1557   } else {
1558     /* One-to-all distribution pattern: We can derive LToG from SF */
1559     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1560   }
1561   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1562   if (flg) {
1563     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1564     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1565   }
1566   /* Migrate DM data to target DM */
1567   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1568   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1569   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1570   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1571   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1572   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1573   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1574   PetscFunctionReturn(0);
1575 }
1576 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  /* Outputs default to NULL; on one rank there is nothing to distribute */
  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition: cellPartSection maps target rank -> count/offset into cellPart's cell list */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel: stratum value = target rank, stratum points = closure of that rank's cells */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    /* The label is local (PETSC_COMM_SELF): each rank only records where its own points go */
    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata: collect the set of target ranks that receive any cells */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure(): label the transitive closure of each rank's cell set */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);

  /* Invert the partition label (where my points go) into a migration label (where my points come from),
     turn that into an SF, and stratify so DAG depths stay contiguous on the receiver */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap (ownership vote enabled: PETSC_TRUE) */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern
       (compose original-mesh -> non-overlapped with non-overlapped -> overlapped) */
    ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    /* oldRemote aliases the permRemote allocation here, so freeing it is correct */
    if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy discretization */
  ierr = DMCopyDisc(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural: mapping back to the original (natural) ordering, if requested */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup: hand the migration SF to the caller if requested, otherwise destroy it */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1757 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions (the same on all ranks)

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Notes:
  If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: advanced

.seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  /* Outputs default to NULL; a serial mesh has no overlap to build */
  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  /* rootSection/rootrank and leafSection/leafrank record which ranks share each root/leaf point */
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF (stratification keeps DAG depths contiguous on the receiver) */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Store the overlap in the new DM: overlaps accumulate across repeated calls */
  ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
  /* Build the new point SF (no ownership vote: the existing owners keep their points) */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1845 
1846 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1847 {
1848   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1849 
1850   PetscFunctionBegin;
1851   *overlap = mesh->overlap;
1852   PetscFunctionReturn(0);
1853 }
1854 
1855 /*@
1856   DMPlexGetOverlap - Get the DMPlex partition overlap.
1857 
1858   Not collective
1859 
1860   Input Parameter:
1861 . dm - The DM
1862 
1863   Output Parameter:
1864 . overlap - The overlap of this DM
1865 
1866   Level: intermediate
1867 
1868 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
1869 @*/
1870 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
1871 {
1872   PetscErrorCode     ierr;
1873 
1874   PetscFunctionBegin;
1875   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1876   ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
1877   PetscFunctionReturn(0);
1878 }
1879 
1880 /*@C
1881   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1882   root process of the original's communicator.
1883 
1884   Collective on dm
1885 
1886   Input Parameter:
1887 . dm - the original DMPlex object
1888 
1889   Output Parameters:
1890 + sf - the PetscSF used for point distribution (optional)
1891 - gatherMesh - the gathered DM object, or NULL
1892 
1893   Level: intermediate
1894 
1895 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1896 @*/
1897 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1898 {
1899   MPI_Comm       comm;
1900   PetscMPIInt    size;
1901   PetscPartitioner oldPart, gatherPart;
1902   PetscErrorCode ierr;
1903 
1904   PetscFunctionBegin;
1905   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1906   PetscValidPointer(gatherMesh,3);
1907   *gatherMesh = NULL;
1908   if (sf) *sf = NULL;
1909   comm = PetscObjectComm((PetscObject)dm);
1910   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
1911   if (size == 1) PetscFunctionReturn(0);
1912   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1913   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1914   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1915   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1916   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1917   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1918 
1919   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1920   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1921   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1922   PetscFunctionReturn(0);
1923 }
1924 
1925 /*@C
1926   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1927 
1928   Collective on dm
1929 
1930   Input Parameter:
1931 . dm - the original DMPlex object
1932 
1933   Output Parameters:
1934 + sf - the PetscSF used for point distribution (optional)
1935 - redundantMesh - the redundant DM object, or NULL
1936 
1937   Level: intermediate
1938 
1939 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1940 @*/
1941 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1942 {
1943   MPI_Comm       comm;
1944   PetscMPIInt    size, rank;
1945   PetscInt       pStart, pEnd, p;
1946   PetscInt       numPoints = -1;
1947   PetscSF        migrationSF, sfPoint, gatherSF;
1948   DM             gatherDM, dmCoord;
1949   PetscSFNode    *points;
1950   PetscErrorCode ierr;
1951 
1952   PetscFunctionBegin;
1953   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1954   PetscValidPointer(redundantMesh,3);
1955   *redundantMesh = NULL;
1956   comm = PetscObjectComm((PetscObject)dm);
1957   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
1958   if (size == 1) {
1959     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1960     *redundantMesh = dm;
1961     if (sf) *sf = NULL;
1962     PetscFunctionReturn(0);
1963   }
1964   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1965   if (!gatherDM) PetscFunctionReturn(0);
1966   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
1967   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1968   numPoints = pEnd - pStart;
1969   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRMPI(ierr);
1970   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1971   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1972   for (p = 0; p < numPoints; p++) {
1973     points[p].index = p;
1974     points[p].rank  = 0;
1975   }
1976   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1977   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1978   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1979   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1980   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1981   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1982   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1983   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1984   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1985   if (sf) {
1986     PetscSF tsf;
1987 
1988     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1989     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1990     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1991   }
1992   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1993   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1994   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1995   PetscFunctionReturn(0);
1996 }
1997 
1998 /*@
1999   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
2000 
2001   Collective
2002 
2003   Input Parameter:
2004 . dm      - The DM object
2005 
2006   Output Parameter:
2007 . distributed - Flag whether the DM is distributed
2008 
2009   Level: intermediate
2010 
2011   Notes:
2012   This currently finds out whether at least two ranks have any DAG points.
2013   This involves MPI_Allreduce() with one integer.
2014   The result is currently not stashed so every call to this routine involves this global communication.
2015 
2016 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2017 @*/
2018 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2019 {
2020   PetscInt          pStart, pEnd, count;
2021   MPI_Comm          comm;
2022   PetscMPIInt       size;
2023   PetscErrorCode    ierr;
2024 
2025   PetscFunctionBegin;
2026   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2027   PetscValidPointer(distributed,2);
2028   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2029   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
2030   if (size == 1) { *distributed = PETSC_FALSE; PetscFunctionReturn(0); }
2031   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2032   count = (pEnd - pStart) > 0 ? 1 : 0;
2033   ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRMPI(ierr);
2034   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2035   PetscFunctionReturn(0);
2036 }
2037