xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 2f6eced2a19e978d64f27de66fbc6c26cc5c7934)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@
5   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
6 
7   Input Parameters:
8 + dm      - The DM object
9 - useCone - Flag to use the cone first
10 
11   Level: intermediate
12 
13   Notes:
14 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
15 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
16 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
17 
18 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
19 @*/
20 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
21 {
22   DM_Plex *mesh = (DM_Plex *) dm->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
26   mesh->useCone = useCone;
27   PetscFunctionReturn(0);
28 }
29 
30 /*@
31   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
32 
33   Input Parameter:
34 . dm      - The DM object
35 
36   Output Parameter:
37 . useCone - Flag to use the cone first
38 
39   Level: intermediate
40 
41   Notes:
42 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
43 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
44 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
45 
46 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
47 @*/
48 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
49 {
50   DM_Plex *mesh = (DM_Plex *) dm->data;
51 
52   PetscFunctionBegin;
53   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
54   PetscValidIntPointer(useCone, 2);
55   *useCone = mesh->useCone;
56   PetscFunctionReturn(0);
57 }
58 
59 /*@
60   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
61 
62   Input Parameters:
63 + dm      - The DM object
64 - useClosure - Flag to use the closure
65 
66   Level: intermediate
67 
68   Notes:
69 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
70 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
71 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
72 
73 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
74 @*/
75 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
76 {
77   DM_Plex *mesh = (DM_Plex *) dm->data;
78 
79   PetscFunctionBegin;
80   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
81   mesh->useClosure = useClosure;
82   PetscFunctionReturn(0);
83 }
84 
85 /*@
86   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
87 
88   Input Parameter:
89 . dm      - The DM object
90 
91   Output Parameter:
92 . useClosure - Flag to use the closure
93 
94   Level: intermediate
95 
96   Notes:
97 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
98 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
99 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
100 
101 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
102 @*/
103 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
104 {
105   DM_Plex *mesh = (DM_Plex *) dm->data;
106 
107   PetscFunctionBegin;
108   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
109   PetscValidIntPointer(useClosure, 2);
110   *useClosure = mesh->useClosure;
111   PetscFunctionReturn(0);
112 }
113 
114 /*@
115   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
116 
117   Input Parameters:
118 + dm      - The DM object
119 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
120 
121   Level: intermediate
122 
.seealso: DMPlexGetAdjacencyUseAnchors(), DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
124 @*/
125 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
126 {
127   DM_Plex *mesh = (DM_Plex *) dm->data;
128 
129   PetscFunctionBegin;
130   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
131   mesh->useAnchors = useAnchors;
132   PetscFunctionReturn(0);
133 }
134 
135 /*@
136   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
137 
138   Input Parameter:
139 . dm      - The DM object
140 
141   Output Parameter:
142 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
143 
144   Level: intermediate
145 
146 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
147 @*/
148 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
149 {
150   DM_Plex *mesh = (DM_Plex *) dm->data;
151 
152   PetscFunctionBegin;
153   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
154   PetscValidIntPointer(useAnchors, 2);
155   *useAnchors = mesh->useAnchors;
156   PetscFunctionReturn(0);
157 }
158 
159 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
160 {
161   const PetscInt *cone = NULL;
162   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
163   PetscErrorCode  ierr;
164 
165   PetscFunctionBeginHot;
166   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
167   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
168   for (c = 0; c <= coneSize; ++c) {
169     const PetscInt  point   = !c ? p : cone[c-1];
170     const PetscInt *support = NULL;
171     PetscInt        supportSize, s, q;
172 
173     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
174     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
175     for (s = 0; s < supportSize; ++s) {
176       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
177         if (support[s] == adj[q]) break;
178       }
179       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
180     }
181   }
182   *adjSize = numAdj;
183   PetscFunctionReturn(0);
184 }
185 
186 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
187 {
188   const PetscInt *support = NULL;
189   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
190   PetscErrorCode  ierr;
191 
192   PetscFunctionBeginHot;
193   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
194   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
195   for (s = 0; s <= supportSize; ++s) {
196     const PetscInt  point = !s ? p : support[s-1];
197     const PetscInt *cone  = NULL;
198     PetscInt        coneSize, c, q;
199 
200     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
201     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
202     for (c = 0; c < coneSize; ++c) {
203       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
204         if (cone[c] == adj[q]) break;
205       }
206       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
207     }
208   }
209   *adjSize = numAdj;
210   PetscFunctionReturn(0);
211 }
212 
213 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
214 {
215   PetscInt      *star = NULL;
216   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
217   PetscErrorCode ierr;
218 
219   PetscFunctionBeginHot;
220   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
221   for (s = 0; s < starSize*2; s += 2) {
222     const PetscInt *closure = NULL;
223     PetscInt        closureSize, c, q;
224 
225     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
226     for (c = 0; c < closureSize*2; c += 2) {
227       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
228         if (closure[c] == adj[q]) break;
229       }
230       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
231     }
232     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
233   }
234   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
235   *adjSize = numAdj;
236   PetscFunctionReturn(0);
237 }
238 
/* Dispatch adjacency computation for point p according to the three flags,
   allocating the output array on demand and optionally substituting
   constrained points by their anchor points. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): asiz persists across calls. If a caller passes a
     pre-allocated *adj with *adjSize < 0, the value used is whatever a
     previous call computed, possibly for a different DM — confirm callers
     never do this. */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      /* maxAnchors scales the allocation below so anchor substitution fits */
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Upper-bound the adjacency size by geometric series in the maximum
       cone/support sizes over the mesh depth, capped by the chart size */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Select the adjacency pattern: transitive closure, cone-first, or support-first */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the adjacency by its anchor points:
       compact the constrained point out of the array, then append its
       anchors (deduplicated) at the end */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Shift the tail left over the constrained point; i is not
           advanced, so the shifted-in element is examined next */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* NOTE(review): as in the *_Internal helpers, this idiom stores at
             orig[numAdj] before checking numAdj > maxAdjSize, which writes one
             past the end when the array is exactly full — confirm upstream fix */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
321 
322 /*@
323   DMPlexGetAdjacency - Return all points adjacent to the given point
324 
325   Input Parameters:
326 + dm - The DM object
327 . p  - The point
328 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
329 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
330 
331   Output Parameters:
332 + adjSize - The number of adjacent points
333 - adj - The adjacent points
334 
335   Level: advanced
336 
337   Notes: The user must PetscFree the adj array if it was not passed in.
338 
339 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
340 @*/
341 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
342 {
343   DM_Plex       *mesh = (DM_Plex *) dm->data;
344   PetscErrorCode ierr;
345 
346   PetscFunctionBeginHot;
347   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
348   PetscValidPointer(adjSize,3);
349   PetscValidPointer(adj,4);
350   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
351   PetscFunctionReturn(0);
352 }
353 
354 /*@
355   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
356 
357   Collective on DM
358 
359   Input Parameters:
360 + dm      - The DM
361 - sfPoint - The PetscSF which encodes point connectivity
362 
363   Output Parameters:
364 + processRanks - A list of process neighbors, or NULL
365 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
366 
367   Level: developer
368 
369 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
370 @*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;  /* one bit per process: is it a neighbor? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity: mark every rank that holds a
     leaf of one of our roots */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity: mark every rank that
     shares one of our leaf points */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity: mark the owner of each of our leaves */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: exclude ourselves, then count set bits */
  PetscBTClear(neighbors, rank);
  for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* One SF edge per neighbor: local id is the neighbor rank, remote side is
     (our rank) on that process, giving the two-sided process graph */
  for(proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  /* ranksNew ownership passes to the IS when requested, else free it here */
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
443 
444 /*@
445   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
446 
447   Collective on DM
448 
449   Input Parameter:
450 . dm - The DM
451 
452   Output Parameters:
453 + rootSection - The number of leaves for a given root point
454 . rootrank    - The rank of each edge into the root point
455 . leafSection - The number of processes sharing a given leaf point
456 - leafrank    - The rank of each process sharing a leaf point
457 
458   Level: developer
459 
460 .seealso: DMPlexCreateOverlap()
461 @*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree becomes the
     dof count of rootSection at each point */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every process contributes its own rank
     through the SF, so roots learn which ranks hold their leaf copies */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves, producing the leaf-side view of sharing */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
498 
499 /*@C
  DMPlexCreateOverlap - Compute a label indicating, for each local point, the remote ranks that should receive it as overlap, given the sharing information from DMPlexDistributeOwnership()
501 
502   Collective on DM
503 
504   Input Parameters:
505 + dm          - The DM
506 . levels      - Number of overlap levels
507 . rootSection - The number of leaves for a given root point
508 . rootrank    - The rank of each edge into the root point
509 . leafSection - The number of processes sharing a given leaf point
510 - leafrank    - The rank of each process sharing a leaf point
511 
  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
514 
515   Level: developer
516 
517 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
518 @*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point. Every point adjacent to one of
     our leaves is donated to the rank owning the root. */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is allocated on the first call and reused afterwards */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes:
         donate our adjacency to each of those other sharing ranks */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves: donate our adjacency to each rank that
       holds a leaf copy of this root */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels beyond the first */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap: add it unless the adjacency
     pattern (useCone=FALSE, useClosure=TRUE, i.e. FEM) already includes it */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF: every process is a leaf of every other */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < numProcs; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
627 
/* Build the migration SF that maps the current local mesh plus the incoming
   overlap points (described by overlapSF) to the new local numbering, keeping
   points grouped by depth stratum. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     exchange the depth of every local point so each process learns the depth
     of the overlap points it will receive */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): the shift arithmetic below encodes the interleaved Plex
     stratum ordering (cells, vertices, faces, edges); confirm against
     DMPlexStratify before modifying */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] tracks where the next received point of depth d is placed */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local
     points plus the incoming overlap points */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves, shifted to their new stratum position */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so already-shared
     points keep pointing at their true owners rather than at ourselves */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points at the end of their depth stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership passes to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
722 
723 /*@
724   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
725 
  Input Parameters:
727 + dm          - The DM
728 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
729 
730   Output Parameter:
731 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
732 
733   Level: developer
734 
735 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
736 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  /* A process with no points (ldepth < 0) is allowed to disagree with the global depth */
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  /* Encode each root point's stratum as 2*d+1 (regular point at depth d) or 2*d
     (hybrid point at depth d), so hybrid points sort separately from regular ones */
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
  /* Pull the encoded stratum of each incoming point onto its leaf */
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  /* Compute the starting offset of each encoded stratum in the new numbering.
     NOTE(review): the resulting order appears to be regular cells, hybrid cells,
     regular vertices, hybrid vertices, then the intermediate strata -- confirm
     against the numbering convention expected by DMPlexStratify */
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  /* Same roots and remotes as the input SF; only the leaf ordering changes */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
805 
806 /*@
807   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
808 
809   Collective on DM
810 
811   Input Parameters:
812 + dm - The DMPlex object
813 . pointSF - The PetscSF describing the communication pattern
814 . originalSection - The PetscSection for existing data layout
815 - originalVec - The existing data
816 
817   Output Parameters:
+ newSection - The PetscSection describing the new data layout
819 - newVec - The new data
820 
821   Level: developer
822 
823 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
824 @*/
825 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
826 {
827   PetscSF        fieldSF;
828   PetscInt      *remoteOffsets, fieldSize;
829   PetscScalar   *originalValues, *newValues;
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
834   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
835 
836   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
837   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
838   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
839 
840   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
841   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
842   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
843   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
844   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
845   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
846   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
847   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
848   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
849   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
850   PetscFunctionReturn(0);
851 }
852 
853 /*@
854   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
855 
856   Collective on DM
857 
858   Input Parameters:
859 + dm - The DMPlex object
860 . pointSF - The PetscSF describing the communication pattern
861 . originalSection - The PetscSection for existing data layout
862 - originalIS - The existing data
863 
864   Output Parameters:
+ newSection - The PetscSection describing the new data layout
866 - newIS - The new data
867 
868   Level: developer
869 
870 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
871 @*/
872 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
873 {
874   PetscSF         fieldSF;
875   PetscInt       *newValues, *remoteOffsets, fieldSize;
876   const PetscInt *originalValues;
877   PetscErrorCode  ierr;
878 
879   PetscFunctionBegin;
880   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
881   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
882 
883   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
884   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
885 
886   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
887   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
888   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
889   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
890   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
891   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
892   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
893   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
894   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
895   PetscFunctionReturn(0);
896 }
897 
898 /*@
899   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
900 
901   Collective on DM
902 
903   Input Parameters:
904 + dm - The DMPlex object
905 . pointSF - The PetscSF describing the communication pattern
906 . originalSection - The PetscSection for existing data layout
907 . datatype - The type of data
908 - originalData - The existing data
909 
910   Output Parameters:
911 + newSection - The PetscSection describing the new data layout
912 - newData - The new data
913 
914   Level: developer
915 
916 .seealso: DMPlexDistribute(), DMPlexDistributeField()
917 @*/
918 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
919 {
920   PetscSF        fieldSF;
921   PetscInt      *remoteOffsets, fieldSize;
922   PetscMPIInt    dataSize;
923   PetscErrorCode ierr;
924 
925   PetscFunctionBegin;
926   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
927   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
928 
929   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
930   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
931   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
932 
933   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
934   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
935   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
936   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
937   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
938   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
943 {
944   DM_Plex               *mesh  = (DM_Plex*) dm->data;
945   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
946   MPI_Comm               comm;
947   PetscSF                coneSF;
948   PetscSection           originalConeSection, newConeSection;
949   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
950   PetscBool              flg;
951   PetscErrorCode         ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
955   PetscValidPointer(dmParallel,4);
956   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
957 
958   /* Distribute cone section */
959   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
960   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
961   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
962   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
963   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
964   {
965     PetscInt pStart, pEnd, p;
966 
967     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
968     for (p = pStart; p < pEnd; ++p) {
969       PetscInt coneSize;
970       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
971       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
972     }
973   }
974   /* Communicate and renumber cones */
975   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
976   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
977   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
978   if (original) {
979     PetscInt numCones;
980 
981     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
982     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
983   }
984   else {
985     globCones = cones;
986   }
987   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
988   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
989   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
990   if (original) {
991     ierr = PetscFree(globCones);CHKERRQ(ierr);
992   }
993   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
994   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
995 #if PETSC_USE_DEBUG
996   {
997     PetscInt  p;
998     PetscBool valid = PETSC_TRUE;
999     for (p = 0; p < newConesSize; ++p) {
1000       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1001     }
1002     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1003   }
1004 #endif
1005   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1006   if (flg) {
1007     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1008     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1009     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1010     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1011     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1012   }
1013   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1014   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1015   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1016   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1017   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1018   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1019   /* Create supports and stratify sieve */
1020   {
1021     PetscInt pStart, pEnd;
1022 
1023     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1024     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1025   }
1026   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1027   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1028   pmesh->useCone    = mesh->useCone;
1029   pmesh->useClosure = mesh->useClosure;
1030   PetscFunctionReturn(0);
1031 }
1032 
1033 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1034 {
1035   MPI_Comm         comm;
1036   PetscSection     originalCoordSection, newCoordSection;
1037   Vec              originalCoordinates, newCoordinates;
1038   PetscInt         bs;
1039   const char      *name;
1040   const PetscReal *maxCell, *L;
1041   const DMBoundaryType *bd;
1042   PetscErrorCode   ierr;
1043 
1044   PetscFunctionBegin;
1045   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1046   PetscValidPointer(dmParallel, 3);
1047 
1048   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1049   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1050   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1051   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1052   if (originalCoordinates) {
1053     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1054     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1055     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1056 
1057     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1058     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1059     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1060     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1061     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1062   }
1063   ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
1064   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 /* Here we are assuming that process 0 always has everything */
1069 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1070 {
1071   DM_Plex         *mesh = (DM_Plex*) dm->data;
1072   MPI_Comm         comm;
1073   DMLabel          depthLabel;
1074   PetscMPIInt      rank;
1075   PetscInt         depth, d, numLabels, numLocalLabels, l;
1076   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1077   PetscObjectState depthState = -1;
1078   PetscErrorCode   ierr;
1079 
1080   PetscFunctionBegin;
1081   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1082   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1083   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1084   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1085   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1086 
1087   /* If the user has changed the depth label, communicate it instead */
1088   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1089   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1090   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1091   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1092   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1093   if (sendDepth) {
1094     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1095     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1096   }
1097   /* Everyone must have either the same number of labels, or none */
1098   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1099   numLabels = numLocalLabels;
1100   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1101   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1102   for (l = numLabels-1; l >= 0; --l) {
1103     DMLabel     label = NULL, labelNew = NULL;
1104     PetscBool   isdepth;
1105 
1106     if (hasLabels) {
1107       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1108       /* Skip "depth" because it is recreated */
1109       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1110     }
1111     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1112     if (isdepth && !sendDepth) continue;
1113     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1114     if (isdepth) {
1115       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1116       PetscInt gdepth;
1117 
1118       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1119       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1120       for (d = 0; d <= gdepth; ++d) {
1121         PetscBool has;
1122 
1123         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1124         if (!has) ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);
1125       }
1126     }
1127     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1128   }
1129   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1130   PetscFunctionReturn(0);
1131 }
1132 
/* Communicate per-point hybrid flags across the migration SF and reconstruct the
   hybridPointMax[] bounds on the distributed mesh.
   NOTE(review): the renumbering argument is not used in this function -- confirm
   whether it is kept only for interface symmetry with the sibling routines. */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;   /* per-point hybrid flag on the old / new mesh */
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  /* Flag every local point at or beyond the hybrid bound of its stratum.
     For depth == 1 (uninterpolated meshes) the d == 1 stratum holds cells, so it
     maps to the hybridPointMax entry for dim */
  for (d = 0; d <= depth; d++) {
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  /* Push the flags from the old mesh's points to their new owners */
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  /* The first flagged point of each stratum becomes the new hybrid bound
     (-1 when the stratum has no hybrid points) */
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1179 
1180 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1181 {
1182   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1183   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1184   MPI_Comm        comm;
1185   DM              refTree;
1186   PetscSection    origParentSection, newParentSection;
1187   PetscInt        *origParents, *origChildIDs;
1188   PetscBool       flg;
1189   PetscErrorCode  ierr;
1190 
1191   PetscFunctionBegin;
1192   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1193   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1194   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1195 
1196   /* Set up tree */
1197   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1198   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1199   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1200   if (origParentSection) {
1201     PetscInt        pStart, pEnd;
1202     PetscInt        *newParents, *newChildIDs, *globParents;
1203     PetscInt        *remoteOffsetsParents, newParentSize;
1204     PetscSF         parentSF;
1205 
1206     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1207     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1208     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1209     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1210     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1211     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1212     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1213     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1214     if (original) {
1215       PetscInt numParents;
1216 
1217       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1218       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1219       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1220     }
1221     else {
1222       globParents = origParents;
1223     }
1224     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1225     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1226     if (original) {
1227       ierr = PetscFree(globParents);CHKERRQ(ierr);
1228     }
1229     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1230     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1231     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1232 #if PETSC_USE_DEBUG
1233     {
1234       PetscInt  p;
1235       PetscBool valid = PETSC_TRUE;
1236       for (p = 0; p < newParentSize; ++p) {
1237         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1238       }
1239       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1240     }
1241 #endif
1242     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1243     if (flg) {
1244       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1245       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1246       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1247       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1248       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1249     }
1250     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1251     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1252     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1253     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1254   }
1255   pmesh->useAnchors = mesh->useAnchors;
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1260 {
1261   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1262   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1263   PetscMPIInt            rank, numProcs;
1264   MPI_Comm               comm;
1265   PetscErrorCode         ierr;
1266 
1267   PetscFunctionBegin;
1268   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1269   PetscValidPointer(dmParallel,7);
1270 
1271   /* Create point SF for parallel mesh */
1272   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1273   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1274   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1275   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1276   {
1277     const PetscInt *leaves;
1278     PetscSFNode    *remotePoints, *rowners, *lowners;
1279     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1280     PetscInt        pStart, pEnd;
1281 
1282     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1283     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1284     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1285     for (p=0; p<numRoots; p++) {
1286       rowners[p].rank  = -1;
1287       rowners[p].index = -1;
1288     }
1289     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1290     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1291     for (p = 0; p < numLeaves; ++p) {
1292       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1293         lowners[p].rank  = rank;
1294         lowners[p].index = leaves ? leaves[p] : p;
1295       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1296         lowners[p].rank  = -2;
1297         lowners[p].index = -2;
1298       }
1299     }
1300     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1301       rowners[p].rank  = -3;
1302       rowners[p].index = -3;
1303     }
1304     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1305     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1306     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1307     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1308     for (p = 0; p < numLeaves; ++p) {
1309       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1310       if (lowners[p].rank != rank) ++numGhostPoints;
1311     }
1312     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1313     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1314     for (p = 0, gp = 0; p < numLeaves; ++p) {
1315       if (lowners[p].rank != rank) {
1316         ghostPoints[gp]        = leaves ? leaves[p] : p;
1317         remotePoints[gp].rank  = lowners[p].rank;
1318         remotePoints[gp].index = lowners[p].index;
1319         ++gp;
1320       }
1321     }
1322     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1323     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1324     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1325   }
1326   pmesh->useCone    = mesh->useCone;
1327   pmesh->useClosure = mesh->useClosure;
1328   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1342 
1343   Level: developer
1344 
1345 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1346 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    /* MAXLOC over (rank, index) pairs: the highest bidding rank wins each root */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: the current owner (this rank's roots) keeps ownership */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Broadcast ownership decisions back to the leaves: leafNodes[p] now holds the owner of point p */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Points owned by another rank become leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  /* pointLocal/pointRemote ownership transfers to the SF via PETSC_OWN_POINTER */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1408 
/*@C
  DMPlexMigrate  - Migrates internal DM data over the supplied star forest

  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object

  Level: intermediate

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
1423 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1424 {
1425   MPI_Comm               comm;
1426   PetscInt               dim, nroots;
1427   PetscSF                sfPoint;
1428   ISLocalToGlobalMapping ltogMigration;
1429   ISLocalToGlobalMapping ltogOriginal = NULL;
1430   PetscBool              flg;
1431   PetscErrorCode         ierr;
1432 
1433   PetscFunctionBegin;
1434   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1435   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1436   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1437   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1438   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1439 
1440   /* Check for a one-to-all distribution pattern */
1441   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1442   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1443   if (nroots >= 0) {
1444     IS                     isOriginal;
1445     PetscInt               n, size, nleaves;
1446     PetscInt              *numbering_orig, *numbering_new;
1447     /* Get the original point numbering */
1448     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1449     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1450     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1451     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1452     /* Convert to positive global numbers */
1453     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1454     /* Derive the new local-to-global mapping from the old one */
1455     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1456     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1457     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1458     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1459     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1460     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1461     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1462   } else {
1463     /* One-to-all distribution pattern: We can derive LToG from SF */
1464     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1465   }
1466   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1467   if (flg) {
1468     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1469     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1470   }
1471   /* Migrate DM data to target DM */
1472   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1473   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1474   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1475   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1476   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1477   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1478   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1479   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1480   PetscFunctionReturn(0);
1481 }
1482 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
1507 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1508 {
1509   MPI_Comm               comm;
1510   PetscPartitioner       partitioner;
1511   IS                     cellPart;
1512   PetscSection           cellPartSection;
1513   DM                     dmCoord;
1514   DMLabel                lblPartition, lblMigration;
1515   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1516   PetscBool              flg;
1517   PetscMPIInt            rank, numProcs, p;
1518   PetscErrorCode         ierr;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1522   if (sf) PetscValidPointer(sf,4);
1523   PetscValidPointer(dmParallel,5);
1524 
1525   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1526   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1527   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1528   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1529 
1530   *dmParallel = NULL;
1531   if (numProcs == 1) {
1532     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1533     PetscFunctionReturn(0);
1534   }
1535 
1536   /* Create cell partition */
1537   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1538   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1539   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1540   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1541   {
1542     /* Convert partition to DMLabel */
1543     PetscInt proc, pStart, pEnd, npoints, poffset;
1544     const PetscInt *points;
1545     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1546     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1547     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1548     for (proc = pStart; proc < pEnd; proc++) {
1549       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1550       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1551       for (p = poffset; p < poffset+npoints; p++) {
1552         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1553       }
1554     }
1555     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1556   }
1557   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1558   {
1559     /* Build a global process SF */
1560     PetscSFNode *remoteProc;
1561     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1562     for (p = 0; p < numProcs; ++p) {
1563       remoteProc[p].rank  = p;
1564       remoteProc[p].index = rank;
1565     }
1566     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1567     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1568     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1569   }
1570   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1571   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1572   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1573   /* Stratify the SF in case we are migrating an already parallel plex */
1574   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1575   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1576   sfMigration = sfStratified;
1577   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1578   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1579   if (flg) {
1580     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1581     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1582   }
1583 
1584   /* Create non-overlapping parallel DM and migrate internal data */
1585   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1586   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1587   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1588 
1589   /* Build the point SF without overlap */
1590   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1591   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1592   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1593   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1594   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1595 
1596   if (overlap > 0) {
1597     DM                 dmOverlap;
1598     PetscInt           nroots, nleaves;
1599     PetscSFNode       *newRemote;
1600     const PetscSFNode *oldRemote;
1601     PetscSF            sfOverlap, sfOverlapPoint;
1602     /* Add the partition overlap to the distributed DM */
1603     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1604     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1605     *dmParallel = dmOverlap;
1606     if (flg) {
1607       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1608       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1609     }
1610 
1611     /* Re-map the migration SF to establish the full migration pattern */
1612     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1613     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1614     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1615     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1616     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1617     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1618     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1619     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1620     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1621     sfMigration = sfOverlapPoint;
1622   }
1623   /* Cleanup Partition */
1624   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1625   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1626   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1627   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1628   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1629   /* Copy BC */
1630   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1631   /* Create sfNatural */
1632   if (dm->useNatural) {
1633     PetscSection section;
1634 
1635     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1636     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1637   }
1638   /* Cleanup */
1639   if (sf) {*sf = sfMigration;}
1640   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1641   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1642   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1643   PetscFunctionReturn(0);
1644 }
1645 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* Ownership sections/ranks are only needed to build the overlap label */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF: no ownership vote, existing owners keep their points */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition: the overlap SF is handed to the caller when requested */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1727 
1728 /*@C
1729   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1730   root process of the original's communicator.
1731 
1732   Input Parameters:
1733 . dm - the original DMPlex object
1734 
1735   Output Parameters:
1736 . gatherMesh - the gathered DM object, or NULL
1737 
1738   Level: intermediate
1739 
1740 .keywords: mesh
1741 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1742 @*/
1743 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1744 {
1745   MPI_Comm       comm;
1746   PetscMPIInt    size;
1747   PetscPartitioner oldPart, gatherPart;
1748   PetscErrorCode ierr;
1749 
1750   PetscFunctionBegin;
1751   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1752   comm = PetscObjectComm((PetscObject)dm);
1753   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1754   *gatherMesh = NULL;
1755   if (size == 1) PetscFunctionReturn(0);
1756   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1757   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1758   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1759   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1760   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1761   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1762   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1763   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1764   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1765   PetscFunctionReturn(0);
1766 }
1767 
1768 /*@C
1769   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1770 
1771   Input Parameters:
1772 . dm - the original DMPlex object
1773 
1774   Output Parameters:
1775 . redundantMesh - the redundant DM object, or NULL
1776 
1777   Level: intermediate
1778 
1779 .keywords: mesh
1780 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1781 @*/
1782 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1783 {
1784   MPI_Comm       comm;
1785   PetscMPIInt    size, rank;
1786   PetscInt       pStart, pEnd, p;
1787   PetscInt       numPoints = -1;
1788   PetscSF        migrationSF, sfPoint;
1789   DM             gatherDM, dmCoord;
1790   PetscSFNode    *points;
1791   PetscErrorCode ierr;
1792 
1793   PetscFunctionBegin;
1794   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1795   comm = PetscObjectComm((PetscObject)dm);
1796   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1797   *redundantMesh = NULL;
1798   if (size == 1) {
1799     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1800     *redundantMesh = dm;
1801     PetscFunctionReturn(0);
1802   }
1803   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1804   if (!gatherDM) PetscFunctionReturn(0);
1805   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1806   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1807   numPoints = pEnd - pStart;
1808   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1809   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1810   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1811   for (p = 0; p < numPoints; p++) {
1812     points[p].index = p;
1813     points[p].rank  = 0;
1814   }
1815   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1816   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1817   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1818   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1819   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1820   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1821   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1822   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1823   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1824   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1825   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1826   PetscFunctionReturn(0);
1827 }
1828