xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 44c45e3bf38ab8eca9b0c5f7f428c800d7bb968e)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
/*@C
  DMPlexGetAdjacencyUser - get the user-defined adjacency callback

  Input Parameter:
. dm      - The DM object

  Output Parameters:
+ user    - The user callback
- ctx     - context for callback evaluation

  Level: advanced

.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
@*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
{
  DM_Plex *mesh = (DM_Plex *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Either output may be passed as NULL if the caller does not need it */
  if (user) *user = mesh->useradjacency;
  if (ctx) *ctx = mesh->useradjacencyctx;
  PetscFunctionReturn(0);
}
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
181 
/* Core adjacency driver: dispatches to the user callback or to one of the
   cone/support/transitive strategies, optionally allocating the output array and
   optionally replacing constrained (anchored) points by their anchors.
   If *adj is NULL on entry it is allocated here (caller must PetscFree it);
   if *adjSize < 0 on entry it is set to the computed capacity. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): asiz is static, so a call passing *adj != NULL with *adjSize < 0
     reuses whatever size a previous allocating call computed (0 if none); this is
     not thread-safe and couples unrelated invocations -- confirm this is intended. */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    depth = PetscMax(depth, -depth);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    /* Geometric-series upper bound on the number of points reachable by depth
       cone/support hops; scaled by the anchor fan-out and capped by the chart size */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch: a user callback overrides everything; otherwise pick the strategy
     determined by the (useCone, useTransitiveClosure) flags */
  if (mesh->useradjacency) {
    ierr = (*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  /* Post-process: replace each anchored point in the adjacency by its anchor
     points, compacting the array in place and deduplicating the anchors */
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i]; /* shadows the function argument p intentionally */
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the anchored point by shifting the tail left; i is not advanced
           so the shifted-in entry is examined on the next iteration */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor point if not already present (same append-then-check
           idiom as the strategy helpers above) */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
268 
269 /*@
270   DMPlexGetAdjacency - Return all points adjacent to the given point
271 
272   Input Parameters:
273 + dm - The DM object
274 . p  - The point
275 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
276 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
277 
278   Output Parameters:
279 + adjSize - The number of adjacent points
280 - adj - The adjacent points
281 
282   Level: advanced
283 
284   Notes:
285     The user must PetscFree the adj array if it was not passed in.
286 
287 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
288 @*/
289 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
290 {
291   PetscBool      useCone, useClosure, useAnchors;
292   PetscErrorCode ierr;
293 
294   PetscFunctionBeginHot;
295   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
296   PetscValidPointer(adjSize,3);
297   PetscValidPointer(adj,4);
298   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
300   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
301   PetscFunctionReturn(0);
302 }
303 
304 /*@
305   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
306 
307   Collective on dm
308 
309   Input Parameters:
310 + dm      - The DM
311 - sfPoint - The PetscSF which encodes point connectivity
312 
313   Output Parameters:
314 + processRanks - A list of process neighbors, or NULL
315 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
316 
317   Level: developer
318 
319 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
320 @*/
321 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
322 {
323   const PetscSFNode *remotePoints;
324   PetscInt          *localPointsNew;
325   PetscSFNode       *remotePointsNew;
326   const PetscInt    *nranks;
327   PetscInt          *ranksNew;
328   PetscBT            neighbors;
329   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
330   PetscMPIInt        size, proc, rank;
331   PetscErrorCode     ierr;
332 
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
335   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
336   if (processRanks) {PetscValidPointer(processRanks, 3);}
337   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
338   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
339   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
340   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
341   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
342   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
343   /* Compute root-to-leaf process connectivity */
344   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
345   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
346   for (p = pStart; p < pEnd; ++p) {
347     PetscInt ndof, noff, n;
348 
349     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
350     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
351     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
352   }
353   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
354   /* Compute leaf-to-neighbor process connectivity */
355   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
356   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
357   for (p = pStart; p < pEnd; ++p) {
358     PetscInt ndof, noff, n;
359 
360     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
361     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
362     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
363   }
364   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
365   /* Compute leaf-to-root process connectivity */
366   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
367   /* Calculate edges */
368   PetscBTClear(neighbors, rank);
369   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
370   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
372   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
373   for (proc = 0, n = 0; proc < size; ++proc) {
374     if (PetscBTLookup(neighbors, proc)) {
375       ranksNew[n]              = proc;
376       localPointsNew[n]        = proc;
377       remotePointsNew[n].index = rank;
378       remotePointsNew[n].rank  = proc;
379       ++n;
380     }
381   }
382   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
383   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
384   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
385   if (sfProcess) {
386     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
387     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
388     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
389     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
390   }
391   PetscFunctionReturn(0);
392 }
393 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on dm

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: rootSection[p] = SF degree of p */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends this process's rank, so
     the root receives one entry per incoming SF edge (nedges total) */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership transfers to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
448 
/*@C
  DMPlexCreateOverlapLabel - Compute a label marking the points each process must donate to build the requested number of overlap levels

  Collective on dm

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  *ovLabel = NULL;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  /* Serial runs have no overlap to compute; *ovLabel stays NULL */
  if (size == 1) PetscFunctionReturn(0);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  if (!levels) {
    /* Add owned points */
    ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
    PetscFunctionReturn(0);
  }
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point.  The adjacency of each leaf is
     donated to the rank owning the corresponding root. */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* local == NULL means the leaves are contiguous [0, nleaves) */
    ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  /* adj was allocated once by the first DMPlexGetAdjacency call and reused */
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    PetscViewer viewer;
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
  }
  /* Invert sender to receiver label */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    /* NOTE(review): local is dereferenced unconditionally here, while the earlier
       leaf loop guards with 'local ? local[l] : l'; if the SF uses contiguous
       leaves (local == NULL) this dereferences NULL -- confirm whether that case
       can reach this point. */
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
570 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlapLabel(), DMPlexDistributeOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     broadcast each root point's depth to the overlap leaves */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): remoteDepths entries are initialized to -1; an overlap leaf with
     no matching root would index depthRecv[-1] below -- presumably every leaf is
     covered by the broadcast, but confirm. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Shifts place received points after local points within each stratum's new
     numbering; the accumulation below appears to follow the Plex stratum ordering
     (cells, then vertices, then intermediate strata) -- confirm against
     DMPlexStratifyMigrationSF */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the next free slot for incoming points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Overwrite the self-mapping installed above with the true remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended per-stratum via depthIdx */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership transfers to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
681 
682 /*@
683   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
684 
685   Input Parameters:
686 + dm          - The DM
687 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
688 
689   Output Parameter:
690 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
691 
692   Note:
693   This lexicographically sorts by (depth, cellType)
694 
695   Level: developer
696 
697 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
698 @*/
699 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
700 {
701   MPI_Comm           comm;
702   PetscMPIInt        rank, size;
703   PetscInt           d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
704   PetscSFNode       *pointDepths, *remoteDepths;
705   PetscInt          *ilocal;
706   PetscInt          *depthRecv, *depthShift, *depthIdx;
707   PetscInt          *ctRecv,    *ctShift,    *ctIdx;
708   const PetscSFNode *iremote;
709   PetscErrorCode     ierr;
710 
711   PetscFunctionBegin;
712   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
713   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
714   ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
715   ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
716   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
717   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
718   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
719   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
720   ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
721 
722   /* Before building the migration SF we need to know the new stratum offsets */
723   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
724   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
725   for (d = 0; d < depth+1; ++d) {
726     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
727     for (p = pStart; p < pEnd; ++p) {
728       DMPolytopeType ct;
729 
730       ierr = DMPlexGetCellType(dm, p, &ct);CHKERRQ(ierr);
731       pointDepths[p].index = d;
732       pointDepths[p].rank  = ct;
733     }
734   }
735   for (p = 0; p < nleaves; ++p) {remoteDepths[p].index = -1; remoteDepths[p].rank = -1;}
736   ierr = PetscSFBcastBegin(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
737   ierr = PetscSFBcastEnd(sf, MPIU_2INT, pointDepths, remoteDepths,MPI_REPLACE);CHKERRQ(ierr);
738   /* Count received points in each stratum and compute the internal strata shift */
739   ierr = PetscCalloc6(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx);CHKERRQ(ierr);
740   for (p = 0; p < nleaves; ++p) {
741     if (remoteDepths[p].rank < 0) {
742       ++depthRecv[remoteDepths[p].index];
743     } else {
744       ++ctRecv[remoteDepths[p].rank];
745     }
746   }
747   {
748     PetscInt depths[4], dims[4], shift = 0, i, c;
749 
750     /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
751          Consider DM_POLYTOPE_FV_GHOST and DM_POLYTOPE_INTERIOR_GHOST as cells
752      */
753     depths[0] = depth; depths[1] = 0; depths[2] = depth-1; depths[3] = 1;
754     dims[0]   = dim;   dims[1]   = 0; dims[2]   = dim-1;   dims[3]   = 1;
755     for (i = 0; i <= depth; ++i) {
756       const PetscInt dep = depths[i];
757       const PetscInt dim = dims[i];
758 
759       for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
760         if (DMPolytopeTypeGetDim((DMPolytopeType) c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST))) continue;
761         ctShift[c] = shift;
762         shift     += ctRecv[c];
763       }
764       depthShift[dep] = shift;
765       shift          += depthRecv[dep];
766     }
767     for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
768       const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType) c);
769 
770       if ((ctDim < 0 || ctDim > dim) && (c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST)) {
771         ctShift[c] = shift;
772         shift     += ctRecv[c];
773       }
774     }
775   }
776   /* Derive a new local permutation based on stratified indices */
777   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
778   for (p = 0; p < nleaves; ++p) {
779     const PetscInt       dep = remoteDepths[p].index;
780     const DMPolytopeType ct  = (DMPolytopeType) remoteDepths[p].rank;
781 
782     if ((PetscInt) ct < 0) {
783       ilocal[p] = depthShift[dep] + depthIdx[dep];
784       ++depthIdx[dep];
785     } else {
786       ilocal[p] = ctShift[ct] + ctIdx[ct];
787       ++ctIdx[ct];
788     }
789   }
790   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
791   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
792   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
793   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
794   ierr = PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx);CHKERRQ(ierr);
795   ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
796   PetscFunctionReturn(0);
797 }
798 
799 /*@
800   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
801 
802   Collective on dm
803 
804   Input Parameters:
805 + dm - The DMPlex object
806 . pointSF - The PetscSF describing the communication pattern
807 . originalSection - The PetscSection for existing data layout
808 - originalVec - The existing data in a local vector
809 
810   Output Parameters:
+ newSection - The PetscSection describing the new data layout
812 - newVec - The new data in a local vector
813 
814   Level: developer
815 
816 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
817 @*/
818 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
819 {
820   PetscSF        fieldSF;
821   PetscInt      *remoteOffsets, fieldSize;
822   PetscScalar   *originalValues, *newValues;
823   PetscErrorCode ierr;
824 
825   PetscFunctionBegin;
826   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
827   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
828 
829   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
830   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
831   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
832 
833   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
834   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
835   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
836   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
837   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
838   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
839   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
840   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
841   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
842   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 /*@
847   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
848 
849   Collective on dm
850 
851   Input Parameters:
852 + dm - The DMPlex object
853 . pointSF - The PetscSF describing the communication pattern
854 . originalSection - The PetscSection for existing data layout
855 - originalIS - The existing data
856 
857   Output Parameters:
+ newSection - The PetscSection describing the new data layout
859 - newIS - The new data
860 
861   Level: developer
862 
863 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
864 @*/
865 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
866 {
867   PetscSF         fieldSF;
868   PetscInt       *newValues, *remoteOffsets, fieldSize;
869   const PetscInt *originalValues;
870   PetscErrorCode  ierr;
871 
872   PetscFunctionBegin;
873   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
874   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
875 
876   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
877   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
878 
879   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
880   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
881   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
882   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
883   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues,MPI_REPLACE);CHKERRQ(ierr);
884   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
885   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
886   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
887   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
888   PetscFunctionReturn(0);
889 }
890 
891 /*@
892   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
893 
894   Collective on dm
895 
896   Input Parameters:
897 + dm - The DMPlex object
898 . pointSF - The PetscSF describing the communication pattern
899 . originalSection - The PetscSection for existing data layout
900 . datatype - The type of data
901 - originalData - The existing data
902 
903   Output Parameters:
904 + newSection - The PetscSection describing the new data layout
905 - newData - The new data
906 
907   Level: developer
908 
909 .seealso: DMPlexDistribute(), DMPlexDistributeField()
910 @*/
911 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
912 {
913   PetscSF        fieldSF;
914   PetscInt      *remoteOffsets, fieldSize;
915   PetscMPIInt    dataSize;
916   PetscErrorCode ierr;
917 
918   PetscFunctionBegin;
919   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
920   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
921 
922   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
923   ierr = MPI_Type_size(datatype, &dataSize);CHKERRMPI(ierr);
924   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
925 
926   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
927   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
928   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
929   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData,MPI_REPLACE);CHKERRQ(ierr);
930   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
931   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
932   PetscFunctionReturn(0);
933 }
934 
/* Migrate the cone data (cone section, cones, cone orientations) from dm to dmParallel according
   to migrationSF, renumber cone entries into the new local numbering via 'renumbering', then build
   supports, strata, and adjacency settings on the new mesh.
   'original' (optional) maps the source mesh's local point numbers to global numbers; when given,
   cones are translated to global numbering before communication. */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Record the largest cone size seen on the redistributed mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate cone points to global numbering before communicating */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    /* Cones are already in the numbering that 'renumbering' expects */
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  /* Map the received (global) cone points back to the new local numbering, in place */
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    /* IS_GTOLM_MASK leaves unmapped points negative: verify every cone point was resolved */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations are communicated as-is; no point renumbering is applied to them */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Carry the adjacency configuration over to the new mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1027 
1028 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1029 {
1030   MPI_Comm         comm;
1031   PetscSection     originalCoordSection, newCoordSection;
1032   Vec              originalCoordinates, newCoordinates;
1033   PetscInt         bs;
1034   PetscBool        isper;
1035   const char      *name;
1036   const PetscReal *maxCell, *L;
1037   const DMBoundaryType *bd;
1038   PetscErrorCode   ierr;
1039 
1040   PetscFunctionBegin;
1041   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1042   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1043 
1044   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1045   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1046   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1047   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1048   if (originalCoordinates) {
1049     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1050     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1051     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1052 
1053     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1054     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1055     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1056     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1057     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1058   }
1059   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1060   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
/* Migrate all DMLabels from dm to dmParallel according to migrationSF.
   The "depth" label is normally recreated by stratification and skipped here; it is only
   communicated when the user has modified it (detected via its object state). */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  /* Any rank with a modified depth label forces everyone to send it */
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the automatically created depth label on the target so the distributed one replaces it */
    ierr = DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel);CHKERRQ(ierr);
    ierr = DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRMPI(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = 0; l < numLabels; ++l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 always has labels (numLabels was broadcast from it), so its isDepth is authoritative */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRMPI(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      /* NOTE(review): %d with PetscInt arguments; %D would be the correct PETSc format for
         64-bit-index builds -- confirm and fix alongside other SETERRQ2 call sites */
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label; only output if every rank wants to */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&labelNew);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
1137 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1138 {
1139   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1140   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1141   MPI_Comm        comm;
1142   DM              refTree;
1143   PetscSection    origParentSection, newParentSection;
1144   PetscInt        *origParents, *origChildIDs;
1145   PetscBool       flg;
1146   PetscErrorCode  ierr;
1147 
1148   PetscFunctionBegin;
1149   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1150   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1151   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1152 
1153   /* Set up tree */
1154   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1155   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1156   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1157   if (origParentSection) {
1158     PetscInt        pStart, pEnd;
1159     PetscInt        *newParents, *newChildIDs, *globParents;
1160     PetscInt        *remoteOffsetsParents, newParentSize;
1161     PetscSF         parentSF;
1162 
1163     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1164     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1165     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1166     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1167     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1168     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1169     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1170     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1171     if (original) {
1172       PetscInt numParents;
1173 
1174       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1175       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1176       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1177     }
1178     else {
1179       globParents = origParents;
1180     }
1181     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1182     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents,MPI_REPLACE);CHKERRQ(ierr);
1183     if (original) {
1184       ierr = PetscFree(globParents);CHKERRQ(ierr);
1185     }
1186     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1187     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs,MPI_REPLACE);CHKERRQ(ierr);
1188     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1189     if (PetscDefined(USE_DEBUG)) {
1190       PetscInt  p;
1191       PetscBool valid = PETSC_TRUE;
1192       for (p = 0; p < newParentSize; ++p) {
1193         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1194       }
1195       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1196     }
1197     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1198     if (flg) {
1199       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1200       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1201       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1202       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1203       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1204     }
1205     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1206     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1207     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1208     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1209   }
1210   pmesh->useAnchors = mesh->useAnchors;
1211   PetscFunctionReturn(0);
1212 }
1213 
/* Build the point SF (shared/ghost point description) of dmParallel from migrationSF.
   Ownership of points received by multiple ranks is decided with an MPI_MAXLOC reduction
   over (rank, index) bids; every rank that does not win a point records it as a ghost. */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Initialize all root claims to "unclaimed" and push that state to the leaves */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Reduce the bids: the highest (rank, index) pair wins ownership of each root;
       then broadcast the winner back to all leaves */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners,MPI_REPLACE);CHKERRQ(ierr);
    /* Count points owned by another rank; these become the ghost points of the point SF */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    /* The SF takes ownership of ghostPoints and remotePoints (PETSC_OWN_POINTER) */
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Carry the adjacency configuration over to the new mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1290 
1291 /*@
1292   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1293 
1294   Input Parameters:
1295 + dm - The DMPlex object
1296 - flg - Balance the partition?
1297 
1298   Level: intermediate
1299 
1300 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1301 @*/
1302 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1303 {
1304   DM_Plex *mesh = (DM_Plex *)dm->data;
1305 
1306   PetscFunctionBegin;
1307   mesh->partitionBalance = flg;
1308   PetscFunctionReturn(0);
1309 }
1310 
1311 /*@
1312   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1313 
1314   Input Parameter:
1315 . dm - The DMPlex object
1316 
1317   Output Parameter:
1318 . flg - Balance the partition?
1319 
1320   Level: intermediate
1321 
1322 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1323 @*/
1324 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1325 {
1326   DM_Plex *mesh = (DM_Plex *)dm->data;
1327 
1328   PetscFunctionBegin;
1329   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1330   PetscValidBoolPointer(flg, 2);
1331   *flg = mesh->partitionBalance;
1332   PetscFunctionReturn(0);
1333 }
1334 
1335 typedef struct {
1336   PetscInt vote, rank, index;
1337 } Petsc3Int;
1338 
1339 /* MaxLoc, but carry a third piece of information around */
1340 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1341 {
1342   Petsc3Int *a = (Petsc3Int *)inout_;
1343   Petsc3Int *b = (Petsc3Int *)in_;
1344   PetscInt i, len = *len_;
1345   for (i = 0; i < len; i++) {
1346     if (a[i].vote < b[i].vote) {
1347       a[i].vote = b[i].vote;
1348       a[i].rank = b[i].rank;
1349       a[i].index = b[i].index;
1350     } else if (a[i].vote <= b[i].vote) {
1351       if (a[i].rank >= b[i].rank) {
1352         a[i].rank = b[i].rank;
1353         a[i].index = b[i].index;
1354       }
1355     }
1356   }
1357 }
1358 
1359 /*@C
1360   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1361 
1362   Input Parameters:
1363 + dm          - The source DMPlex object
1364 . migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1369 
1370   Notes:
1371   Output pointSF is guaranteed to have the array of local indices (leaves) sorted.
1372 
1373   Level: developer
1374 
1375 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1376 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;                 /* only created/used when ownership && balance; guarded below by (shift != NULL) */
  const PetscInt     numShifts = 13759;      /* size of the random shift table; remote indices are hashed mod this */
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRMPI(ierr);
  ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);

  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    MPI_Op       op;
    MPI_Datatype datatype;
    Petsc3Int   *rootVote = NULL, *leafVote = NULL;
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
      ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
      ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
      ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
      ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
      ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
      ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(nroots, &rootVote);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &leafVote);CHKERRQ(ierr);
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
      }
      /* Either put in a bid or we know we own it */
      /* The vote is the rank, optionally cyclically shifted by a per-point random offset when balancing */
      leafVote[p].vote  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafVote[p].rank = rank;
      leafVote[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      /* -3 is a sentinel smaller than any valid (shifted) rank vote */
      rootVote[p].vote  = -3;
      rootVote[p].rank  = -3;
      rootVote[p].index = -3;
    }
    /* Reduce leaf votes onto the roots with the MaxLocCarry combiner over a 3-int datatype */
    ierr = MPI_Type_contiguous(3, MPIU_INT, &datatype);CHKERRMPI(ierr);
    ierr = MPI_Type_commit(&datatype);CHKERRMPI(ierr);
    ierr = MPI_Op_create(&MaxLocCarry, 1, &op);CHKERRMPI(ierr);
    ierr = PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op);CHKERRQ(ierr);
    ierr = MPI_Op_free(&op);CHKERRMPI(ierr);
    ierr = MPI_Type_free(&datatype);CHKERRMPI(ierr);
    /* Winning (rank, index) pair at each root becomes its owner record */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].rank = rootVote[p].rank;
      rootNodes[p].index = rootVote[p].index;
    }
    ierr = PetscFree(leafVote);CHKERRQ(ierr);
    ierr = PetscFree(rootVote);CHKERRQ(ierr);
  } else {
    /* No vote: every root is owned by the local rank; fill in new local ids below */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Broadcast the owner record of each root back to all its leaves */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes,MPI_REPLACE);CHKERRQ(ierr);

  /* Leaves owned by another rank become leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
    ierr = VecDestroy(&shifts);CHKERRQ(ierr);
  }
  if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
  /* Assemble the point SF; pointLocal/pointRemote ownership passes to the SF */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1490 
1491 /*@C
1492   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1493 
1494   Collective on dm
1495 
  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object
1502 
1503   Level: intermediate
1504 
1505 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1506 @*/
1507 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1508 {
1509   MPI_Comm               comm;
1510   PetscInt               dim, cdim, nroots;
1511   PetscSF                sfPoint;
1512   ISLocalToGlobalMapping ltogMigration;
1513   ISLocalToGlobalMapping ltogOriginal = NULL;
1514   PetscBool              flg;
1515   PetscErrorCode         ierr;
1516 
1517   PetscFunctionBegin;
1518   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1519   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1520   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1521   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1522   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1523   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1524   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1525 
1526   /* Check for a one-to-all distribution pattern */
1527   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1528   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1529   if (nroots >= 0) {
1530     IS        isOriginal;
1531     PetscInt  n, size, nleaves;
1532     PetscInt  *numbering_orig, *numbering_new;
1533 
1534     /* Get the original point numbering */
1535     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1536     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1537     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1538     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1539     /* Convert to positive global numbers */
1540     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1541     /* Derive the new local-to-global mapping from the old one */
1542     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1543     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1544     ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1545     ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new,MPI_REPLACE);CHKERRQ(ierr);
1546     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1547     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1548     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1549   } else {
1550     /* One-to-all distribution pattern: We can derive LToG from SF */
1551     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1552   }
1553   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1554   if (flg) {
1555     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1556     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1557   }
1558   /* Migrate DM data to target DM */
1559   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1560   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1561   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1562   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1563   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1564   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1565   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1566   PetscFunctionReturn(0);
1567 }
1568 
1569 /*@C
1570   DMPlexDistribute - Distributes the mesh and any associated sections.
1571 
1572   Collective on dm
1573 
1574   Input Parameters:
1575 + dm  - The original DMPlex object
1576 - overlap - The overlap of partitions, 0 is the default
1577 
1578   Output Parameters:
1579 + sf - The PetscSF used for point distribution, or NULL if not needed
1580 - dmParallel - The distributed DMPlex object
1581 
1582   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1583 
1584   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1585   representation on the mesh.
1586 
1587   Level: intermediate
1588 
1589 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexGetOverlap()
1590 @*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  /* Nothing to distribute on a single rank; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    IS             is;
    PetscHSetI     ht;
    const PetscInt *points;
    PetscInt       *iranks;
    PetscInt       pStart, pEnd, proc, npoints, poff = 0, nranks;

    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    /* Preallocate strata */
    /* Collect the set of target ranks that receive at least one cell */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
    }
    ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
    ierr = PetscFree(iranks);CHKERRQ(ierr);
    /* Inline DMPlexPartitionLabelClosure() */
    /* Each rank's stratum becomes the closure of the cells assigned to it */
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      if (!npoints) continue;
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
      ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
      ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);

  /* Invert the partition label into a migration label and build the (stratified) migration SF */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves, noldleaves, l;
    const PetscInt    *oldLeaves;
    PetscSFNode       *newRemote, *permRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    /* oldRemote: original root point mapping to original leaf point
       newRemote: original leaf point mapping to overlapped leaf point */
    if (oldLeaves) {
      /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
      ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
      for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
      oldRemote = permRemote;
    }
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote,MPI_REPLACE);CHKERRQ(ierr);
    /* If we permuted, oldRemote points at permRemote, which we own and must free */
    if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetLocalSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup */
  /* Hand the migration SF to the caller if requested, otherwise release it */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1749 
1750 /*@C
1751   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1752 
1753   Collective on dm
1754 
1755   Input Parameters:
1756 + dm  - The non-overlapping distributed DMPlex object
1757 - overlap - The overlap of partitions (the same on all ranks)
1758 
1759   Output Parameters:
1760 + sf - The PetscSF used for point distribution
1761 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1762 
1763   Notes:
1764   If the mesh was not distributed, the return value is NULL.
1765 
1766   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1767   representation on the mesh.
1768 
1769   Level: advanced
1770 
1771 .seealso: DMPlexCreate(), DMSetAdjacency(), DMPlexDistribute(), DMPlexCreateOverlapLabel(), DMPlexGetOverlap()
1772 @*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  if (sf) *sf = NULL;
  *dmOverlap  = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  /* Serial runs have no overlap to build; outputs stay NULL */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* Ownership sections/ranks were only needed to build the overlap label */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Store the overlap in the new DM */
  /* Overlap levels accumulate: add the new overlap to whatever dm already carried */
  ((DM_Plex*)(*dmOverlap)->data)->overlap = overlap + ((DM_Plex*)dm->data)->overlap;
  /* Build the new point SF */
  /* ownership = PETSC_FALSE: existing owners keep their points, no vote is taken */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  /* Hand the overlap migration SF to the caller if requested, otherwise release it */
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1837 
1838 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
1839 {
1840   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1841 
1842   PetscFunctionBegin;
1843   *overlap = mesh->overlap;
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 /*@
1848   DMPlexGetOverlap - Get the DMPlex partition overlap.
1849 
1850   Not collective
1851 
1852   Input Parameter:
1853 . dm - The DM
1854 
1855   Output Parameter:
1856 . overlap - The overlap of this DM
1857 
1858   Level: intermediate
1859 
1860 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap(), DMPlexCreateOverlapLabel()
1861 @*/
1862 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
1863 {
1864   PetscErrorCode     ierr;
1865 
1866   PetscFunctionBegin;
1867   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1868   ierr = PetscUseMethod(dm,"DMPlexGetOverlap_C",(DM,PetscInt*),(dm,overlap));CHKERRQ(ierr);
1869   PetscFunctionReturn(0);
1870 }
1871 
1872 
1873 /*@C
1874   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1875   root process of the original's communicator.
1876 
1877   Collective on dm
1878 
1879   Input Parameter:
1880 . dm - the original DMPlex object
1881 
1882   Output Parameters:
1883 + sf - the PetscSF used for point distribution (optional)
1884 - gatherMesh - the gathered DM object, or NULL
1885 
1886   Level: intermediate
1887 
1888 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1889 @*/
1890 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1891 {
1892   MPI_Comm       comm;
1893   PetscMPIInt    size;
1894   PetscPartitioner oldPart, gatherPart;
1895   PetscErrorCode ierr;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1899   PetscValidPointer(gatherMesh,2);
1900   *gatherMesh = NULL;
1901   if (sf) *sf = NULL;
1902   comm = PetscObjectComm((PetscObject)dm);
1903   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
1904   if (size == 1) PetscFunctionReturn(0);
1905   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1906   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1907   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1908   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1909   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1910   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1911 
1912   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1913   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1914   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1915   PetscFunctionReturn(0);
1916 }
1917 
1918 /*@C
1919   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1920 
1921   Collective on dm
1922 
1923   Input Parameter:
1924 . dm - the original DMPlex object
1925 
1926   Output Parameters:
1927 + sf - the PetscSF used for point distribution (optional)
1928 - redundantMesh - the redundant DM object, or NULL
1929 
1930   Level: intermediate
1931 
1932 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1933 @*/
1934 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1935 {
1936   MPI_Comm       comm;
1937   PetscMPIInt    size, rank;
1938   PetscInt       pStart, pEnd, p;
1939   PetscInt       numPoints = -1;
1940   PetscSF        migrationSF, sfPoint, gatherSF;
1941   DM             gatherDM, dmCoord;
1942   PetscSFNode    *points;
1943   PetscErrorCode ierr;
1944 
1945   PetscFunctionBegin;
1946   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1947   PetscValidPointer(redundantMesh,2);
1948   *redundantMesh = NULL;
1949   comm = PetscObjectComm((PetscObject)dm);
1950   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
1951   if (size == 1) {
1952     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1953     *redundantMesh = dm;
1954     if (sf) *sf = NULL;
1955     PetscFunctionReturn(0);
1956   }
1957   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1958   if (!gatherDM) PetscFunctionReturn(0);
1959   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
1960   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1961   numPoints = pEnd - pStart;
1962   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRMPI(ierr);
1963   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1964   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1965   for (p = 0; p < numPoints; p++) {
1966     points[p].index = p;
1967     points[p].rank  = 0;
1968   }
1969   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1970   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1971   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1972   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1973   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1974   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1975   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1976   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1977   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1978   if (sf) {
1979     PetscSF tsf;
1980 
1981     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1982     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1983     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1984   }
1985   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1986   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1987   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1988   PetscFunctionReturn(0);
1989 }
1990 
1991 /*@
1992   DMPlexIsDistributed - Find out whether this DM is distributed, i.e. more than one rank owns some points.
1993 
1994   Collective
1995 
1996   Input Parameter:
1997 . dm      - The DM object
1998 
1999   Output Parameter:
2000 . distributed - Flag whether the DM is distributed
2001 
2002   Level: intermediate
2003 
2004   Notes:
2005   This currently finds out whether at least two ranks have any DAG points.
2006   This involves MPI_Allreduce() with one integer.
2007   The result is currently not stashed so every call to this routine involves this global communication.
2008 
2009 .seealso: DMPlexDistribute(), DMPlexGetOverlap(), DMPlexIsInterpolated()
2010 @*/
2011 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2012 {
2013   PetscInt          pStart, pEnd, count;
2014   MPI_Comm          comm;
2015   PetscErrorCode    ierr;
2016 
2017   PetscFunctionBegin;
2018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2019   PetscValidPointer(distributed,2);
2020   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2021   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2022   count = !!(pEnd - pStart);
2023   ierr = MPI_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm);CHKERRMPI(ierr);
2024   *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2025   PetscFunctionReturn(0);
2026 }
2027