xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 1620da3bfb1c7463ba1aa20a053a4db6e8846c00)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260 {
261   static PetscInt asiz = 0;
262   PetscInt maxAnchors = 1;
263   PetscInt aStart = -1, aEnd = -1;
264   PetscInt maxAdjSize;
265   PetscSection aSec = NULL;
266   IS aIS = NULL;
267   const PetscInt *anchors;
268   PetscErrorCode  ierr;
269 
270   PetscFunctionBeginHot;
271   if (useAnchors) {
272     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
273     if (aSec) {
274       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
275       maxAnchors = PetscMax(1,maxAnchors);
276       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
277       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
278     }
279   }
280   if (!*adj) {
281     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
282 
283     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
284     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
285     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
286     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289     asiz *= maxAnchors;
290     asiz  = PetscMin(asiz,pEnd-pStart);
291     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
292   }
293   if (*adjSize < 0) *adjSize = asiz;
294   maxAdjSize = *adjSize;
295   if (useTransitiveClosure) {
296     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
297   } else if (useCone) {
298     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   } else {
300     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
301   }
302   if (useAnchors && aSec) {
303     PetscInt origSize = *adjSize;
304     PetscInt numAdj = origSize;
305     PetscInt i = 0, j;
306     PetscInt *orig = *adj;
307 
308     while (i < origSize) {
309       PetscInt p = orig[i];
310       PetscInt aDof = 0;
311 
312       if (p >= aStart && p < aEnd) {
313         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
314       }
315       if (aDof) {
316         PetscInt aOff;
317         PetscInt s, q;
318 
319         for (j = i + 1; j < numAdj; j++) {
320           orig[j - 1] = orig[j];
321         }
322         origSize--;
323         numAdj--;
324         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
325         for (s = 0; s < aDof; ++s) {
326           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327             if (anchors[aOff+s] == orig[q]) break;
328           }
329           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330         }
331       }
332       else {
333         i++;
334       }
335     }
336     *adjSize = numAdj;
337     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
376 #undef __FUNCT__
377 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
378 /*@
379   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
380 
381   Collective on DM
382 
383   Input Parameters:
384 + dm      - The DM
385 - sfPoint - The PetscSF which encodes point connectivity
386 
387   Output Parameters:
388 + processRanks - A list of process neighbors, or NULL
389 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
390 
391   Level: developer
392 
393 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
394 @*/
395 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
396 {
397   const PetscSFNode *remotePoints;
398   PetscInt          *localPointsNew;
399   PetscSFNode       *remotePointsNew;
400   const PetscInt    *nranks;
401   PetscInt          *ranksNew;
402   PetscBT            neighbors;
403   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
404   PetscMPIInt        numProcs, proc, rank;
405   PetscErrorCode     ierr;
406 
407   PetscFunctionBegin;
408   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
409   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
410   if (processRanks) {PetscValidPointer(processRanks, 3);}
411   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
412   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
413   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
414   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
415   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
416   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
417   /* Compute root-to-leaf process connectivity */
418   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
419   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
420   for (p = pStart; p < pEnd; ++p) {
421     PetscInt ndof, noff, n;
422 
423     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
424     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
425     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
426   }
427   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
428   /* Compute leaf-to-neighbor process connectivity */
429   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
430   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
431   for (p = pStart; p < pEnd; ++p) {
432     PetscInt ndof, noff, n;
433 
434     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
435     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
436     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
437   }
438   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
439   /* Compute leaf-to-root process connectivity */
440   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
441   /* Calculate edges */
442   PetscBTClear(neighbors, rank);
443   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
444   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
446   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
447   for(proc = 0, n = 0; proc < numProcs; ++proc) {
448     if (PetscBTLookup(neighbors, proc)) {
449       ranksNew[n]              = proc;
450       localPointsNew[n]        = proc;
451       remotePointsNew[n].index = rank;
452       remotePointsNew[n].rank  = proc;
453       ++n;
454     }
455   }
456   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
457   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
458   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
459   if (sfProcess) {
460     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
461     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
462     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
463     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
464   }
465   PetscFunctionReturn(0);
466 }
467 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree becomes the section dof count */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf sends this process's rank, so each root
     collects one rank entry per attached leaf (nedges entries total) */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* rootrank takes ownership of remoterank (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves: broadcast the per-root rank lists back down the SF */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
524 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute a DMLabel marking, as point/rank pairings, the remote points contributed to each rank for the requested number of overlap levels

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point. Each leaf's adjacency is contributed to the owning rank. */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is allocated on the first call and reused (hence the single PetscFree below) */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          /* Skip self: no overlap contribution to our own rank */
          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels by repeatedly expanding the label through adjacency */
  for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make process SF and invert sender to receiver label */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
639 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build the migration SF that maps every current local point to itself and appends the incoming
   overlap points (leaves of overlapSF), placing received points into their depth strata so the
   resulting DM remains stratified. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     broadcast each root point's depth to the receiving leaves */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* -1 sentinel; every leaf should be overwritten by the broadcast below */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): the shift arithmetic below appears to target the interleaved DMPlex numbering
     (cells, then vertices, then faces/edges) -- confirm against DMPlexGetDepthStratum ordering. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d] is the running insertion cursor for received points of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local points plus received ones */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves, shifted to their new stratified positions */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so shared leaves keep their remote owners */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points at the cursor position of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* The SF takes ownership of ilocal and iremote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
736 
#undef __FUNCT__
#define __FUNCT__ "DMPlexStratifyMigrationSF"
/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a point migration SF so that the DM resulting from the migration is stratified, i.e., receives its points grouped by depth stratum

  Input Parameters:
+ dm          - The DM
- sf          - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  /* All ranks must agree on the global depth; ldepth < 0 means this rank has no points */
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  /* Label every local point with its depth and push those depths to the SF leaves */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  /* The arithmetic below orders strata as: cells (depth), then vertices (0),
     then intermediate strata from depth-1 down to 1 — matching the default Plex chart layout */
  depthShift[depth] = 0;
  for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
  for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
  for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
  for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  /* Same roots and remotes as the input SF, but leaves land in stratified order */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "DMPlexDistributeField"
809 /*@
810   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
811 
812   Collective on DM
813 
814   Input Parameters:
815 + dm - The DMPlex object
816 . pointSF - The PetscSF describing the communication pattern
817 . originalSection - The PetscSection for existing data layout
818 - originalVec - The existing data
819 
820   Output Parameters:
821 + newSection - The PetscSF describing the new data layout
822 - newVec - The new data
823 
824   Level: developer
825 
826 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
827 @*/
828 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
829 {
830   PetscSF        fieldSF;
831   PetscInt      *remoteOffsets, fieldSize;
832   PetscScalar   *originalValues, *newValues;
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
837   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
838 
839   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
840   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
841   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
842 
843   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
844   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
845   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
846   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
847   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
848   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
849   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
850   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
851   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeFieldIS"
/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Migrate the section first; this determines the receiving storage size */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  /* The cast only strips const; the broadcast reads root data without modifying it */
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  /* The new IS takes ownership of newValues */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
900 
901 #undef __FUNCT__
902 #define __FUNCT__ "DMPlexDistributeData"
903 /*@
904   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
905 
906   Collective on DM
907 
908   Input Parameters:
909 + dm - The DMPlex object
910 . pointSF - The PetscSF describing the communication pattern
911 . originalSection - The PetscSection for existing data layout
912 . datatype - The type of data
913 - originalData - The existing data
914 
915   Output Parameters:
916 + newSection - The PetscSection describing the new data layout
917 - newData - The new data
918 
919   Level: developer
920 
921 .seealso: DMPlexDistribute(), DMPlexDistributeField()
922 @*/
923 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
924 {
925   PetscSF        fieldSF;
926   PetscInt      *remoteOffsets, fieldSize;
927   PetscMPIInt    dataSize;
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
932   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
933 
934   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
935   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
936   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
937 
938   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
939   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
940   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
941   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
942   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCones"
/* Migrate the cone section, cone points, and cone orientations of dm onto dmParallel.
   Cone points are translated to global numbering with 'original' (if given), migrated,
   then mapped back to local numbering on the receiver with 'renumbering'. Finally the
   support (symmetrization) and depth strata of dmParallel are computed. */
PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *mesh  = (DM_Plex*) dm->data;
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,4);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);

  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  /* Allocate cone storage on the receiver now that the section is known */
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size over all received points */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    /* Translate cone points to a global numbering before sending */
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  }
  else {
    /* Cones are already in the numbering used for migration */
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  /* Map the received global cone points back to receiver-local numbering;
     IS_GTOLM_MASK flags points missing from the mapping as negative */
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if PETSC_USE_DEBUG
  {
    /* In debug mode, verify that every cone point was found in the mapping */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations need no renumbering, so reuse the same section SF */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  /* Carry over the adjacency configuration */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}
1037 
1038 #undef __FUNCT__
1039 #define __FUNCT__ "DMPlexDistributeCoordinates"
1040 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1041 {
1042   MPI_Comm         comm;
1043   PetscSection     originalCoordSection, newCoordSection;
1044   Vec              originalCoordinates, newCoordinates;
1045   PetscInt         bs;
1046   const char      *name;
1047   const PetscReal *maxCell, *L;
1048   const DMBoundaryType *bd;
1049   PetscErrorCode   ierr;
1050 
1051   PetscFunctionBegin;
1052   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1053   PetscValidPointer(dmParallel, 3);
1054 
1055   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1056   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1057   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1058   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1059   if (originalCoordinates) {
1060     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1061     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1062     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1063 
1064     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1065     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1066     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1067     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1068     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1069   }
1070   ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
1071   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 #undef __FUNCT__
1076 #define __FUNCT__ "DMPlexDistributeLabels"
1077 /* Here we are assuming that process 0 always has everything */
1078 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1079 {
1080   DM_Plex         *mesh = (DM_Plex*) dm->data;
1081   MPI_Comm         comm;
1082   DMLabel          depth;
1083   PetscMPIInt      rank;
1084   PetscInt         numLabels, numLocalLabels, l;
1085   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1086   PetscObjectState depthState = -1;
1087   PetscErrorCode   ierr;
1088 
1089   PetscFunctionBegin;
1090   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1091   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1092   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1093   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1094   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1095 
1096   /* If the user has changed the depth label, communicate it instead */
1097   ierr = DMPlexGetDepthLabel(dm, &depth);CHKERRQ(ierr);
1098   if (depth) {ierr = DMLabelGetState(depth, &depthState);CHKERRQ(ierr);}
1099   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1100   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1101   if (sendDepth) {
1102     ierr = DMRemoveLabel(dmParallel, "depth", &depth);CHKERRQ(ierr);
1103     ierr = DMLabelDestroy(&depth);CHKERRQ(ierr);
1104   }
1105   /* Everyone must have either the same number of labels, or none */
1106   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1107   numLabels = numLocalLabels;
1108   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1109   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1110   for (l = numLabels-1; l >= 0; --l) {
1111     DMLabel     label = NULL, labelNew = NULL;
1112     PetscBool   isdepth;
1113 
1114     if (hasLabels) {
1115       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1116       /* Skip "depth" because it is recreated */
1117       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1118     }
1119     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1120     if (isdepth && !sendDepth) continue;
1121     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1122     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1123   }
1124   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1125   PetscFunctionReturn(0);
1126 }
1127 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
/* Transfer the hybrid (prismatic/ghost) point boundaries of dm to dmParallel.
   For each dimension, the received hybrid points are counted via the global
   renumbering and the new hybridPointMax is placed at the end of the stratum. */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure */
  /* Start from the sender's bounds; rank 0 is assumed authoritative (see Bcast) */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue; /* no hybrid points in this dimension */
    /* Clamp d to depth to handle uninterpolated meshes */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points whose global number falls in the hybrid part of the stratum */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    /* Hybrid points sit at the end of the local stratum after migration */
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1170 
1171 #undef __FUNCT__
1172 #define __FUNCT__ "DMPlexDistributeSetupTree"
1173 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1174 {
1175   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1176   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1177   MPI_Comm        comm;
1178   DM              refTree;
1179   PetscSection    origParentSection, newParentSection;
1180   PetscInt        *origParents, *origChildIDs;
1181   PetscBool       flg;
1182   PetscErrorCode  ierr;
1183 
1184   PetscFunctionBegin;
1185   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1186   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1187   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1188 
1189   /* Set up tree */
1190   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1191   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1192   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1193   if (origParentSection) {
1194     PetscInt        pStart, pEnd;
1195     PetscInt        *newParents, *newChildIDs, *globParents;
1196     PetscInt        *remoteOffsetsParents, newParentSize;
1197     PetscSF         parentSF;
1198 
1199     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1200     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1201     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1202     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1203     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1204     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1205     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1206     if (original) {
1207       PetscInt numParents;
1208 
1209       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1210       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1211       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1212     }
1213     else {
1214       globParents = origParents;
1215     }
1216     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1217     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1218     if (original) {
1219       ierr = PetscFree(globParents);CHKERRQ(ierr);
1220     }
1221     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1222     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1223     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1224 #if PETSC_USE_DEBUG
1225     {
1226       PetscInt  p;
1227       PetscBool valid = PETSC_TRUE;
1228       for (p = 0; p < newParentSize; ++p) {
1229         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1230       }
1231       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1232     }
1233 #endif
1234     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1235     if (flg) {
1236       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1237       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1238       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1239       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1240       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1241     }
1242     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1243     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1244     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1245     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1246   }
1247   pmesh->useAnchors = mesh->useAnchors;
1248   PetscFunctionReturn(0);
1249 }
1250 
1251 #undef __FUNCT__
1252 #define __FUNCT__ "DMPlexDistributeSF"
1253 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1254 {
1255   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1256   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1257   PetscMPIInt            rank, numProcs;
1258   MPI_Comm               comm;
1259   PetscErrorCode         ierr;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1263   PetscValidPointer(dmParallel,7);
1264 
1265   /* Create point SF for parallel mesh */
1266   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1267   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1268   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1269   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1270   {
1271     const PetscInt *leaves;
1272     PetscSFNode    *remotePoints, *rowners, *lowners;
1273     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1274     PetscInt        pStart, pEnd;
1275 
1276     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1277     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1278     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1279     for (p=0; p<numRoots; p++) {
1280       rowners[p].rank  = -1;
1281       rowners[p].index = -1;
1282     }
1283     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1284     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1285     for (p = 0; p < numLeaves; ++p) {
1286       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1287         lowners[p].rank  = rank;
1288         lowners[p].index = leaves ? leaves[p] : p;
1289       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1290         lowners[p].rank  = -2;
1291         lowners[p].index = -2;
1292       }
1293     }
1294     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1295       rowners[p].rank  = -3;
1296       rowners[p].index = -3;
1297     }
1298     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1299     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1300     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1301     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1302     for (p = 0; p < numLeaves; ++p) {
1303       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1304       if (lowners[p].rank != rank) ++numGhostPoints;
1305     }
1306     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1307     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1308     for (p = 0, gp = 0; p < numLeaves; ++p) {
1309       if (lowners[p].rank != rank) {
1310         ghostPoints[gp]        = leaves ? leaves[p] : p;
1311         remotePoints[gp].rank  = lowners[p].rank;
1312         remotePoints[gp].index = lowners[p].index;
1313         ++gp;
1314       }
1315     }
1316     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1317     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1318     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1319   }
1320   pmesh->useCone    = mesh->useCone;
1321   pmesh->useClosure = mesh->useClosure;
1322   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1323   PetscFunctionReturn(0);
1324 }
1325 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreatePointSF"
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: the sending rank keeps ownership and records the new local id */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Tell every migrated copy who the owner is */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Non-owned copies become leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1404 
1405 #undef __FUNCT__
1406 #define __FUNCT__ "DMPlexMigrate"
1407 /*@C
1408   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1409 
1410   Input Parameter:
1411 + dm       - The source DMPlex object
1412 . sf       - The star forest communication context describing the migration pattern
1413 
1414   Output Parameter:
1415 - targetDM - The target DMPlex object
1416 
1417   Level: intermediate
1418 
1419 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1420 @*/
1421 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1422 {
1423   MPI_Comm               comm;
1424   PetscInt               dim, nroots;
1425   PetscSF                sfPoint;
1426   ISLocalToGlobalMapping ltogMigration;
1427   ISLocalToGlobalMapping ltogOriginal = NULL;
1428   PetscBool              flg;
1429   PetscErrorCode         ierr;
1430 
1431   PetscFunctionBegin;
1432   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1433   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1434   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1435   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1436   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1437 
1438   /* Check for a one-to-all distribution pattern */
1439   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1440   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1441   if (nroots >= 0) {
1442     IS                     isOriginal;
1443     PetscInt               n, size, nleaves;
1444     PetscInt              *numbering_orig, *numbering_new;
1445     /* Get the original point numbering */
1446     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1447     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1448     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1449     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1450     /* Convert to positive global numbers */
1451     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1452     /* Derive the new local-to-global mapping from the old one */
1453     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1454     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1455     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1456     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1457     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1458     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1459     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1460   } else {
1461     /* One-to-all distribution pattern: We can derive LToG from SF */
1462     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1463   }
1464   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1465   if (flg) {
1466     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1467     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1468   }
1469   /* Migrate DM data to target DM */
1470   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1471   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1472   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1473   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1474   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1475   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1476   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1477   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 #undef __FUNCT__
1482 #define __FUNCT__ "DMPlexDistribute"
1483 /*@C
1484   DMPlexDistribute - Distributes the mesh and any associated sections.
1485 
1486   Not Collective
1487 
1488   Input Parameter:
1489 + dm  - The original DMPlex object
1490 - overlap - The overlap of partitions, 0 is the default
1491 
1492   Output Parameter:
1493 + sf - The PetscSF used for point distribution
1494 - parallelMesh - The distributed DMPlex object, or NULL
1495 
1496   Note: If the mesh was not distributed, the return value is NULL.
1497 
1498   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1499   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1500   representation on the mesh.
1501 
1502   Level: intermediate
1503 
1504 .keywords: mesh, elements
1505 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1506 @*/
1507 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1508 {
1509   MPI_Comm               comm;
1510   PetscPartitioner       partitioner;
1511   IS                     cellPart;
1512   PetscSection           cellPartSection;
1513   DM                     dmCoord;
1514   DMLabel                lblPartition, lblMigration;
1515   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1516   PetscBool              flg;
1517   PetscMPIInt            rank, numProcs, p;
1518   PetscErrorCode         ierr;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1522   if (sf) PetscValidPointer(sf,4);
1523   PetscValidPointer(dmParallel,5);
1524 
1525   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1526   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1527   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1528   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1529 
1530   *dmParallel = NULL;
1531   if (numProcs == 1) PetscFunctionReturn(0);
1532 
1533   /* Create cell partition */
1534   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1535   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1536   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1537   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1538   {
1539     /* Convert partition to DMLabel */
1540     PetscInt proc, pStart, pEnd, npoints, poffset;
1541     const PetscInt *points;
1542     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1543     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1544     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1545     for (proc = pStart; proc < pEnd; proc++) {
1546       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1547       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1548       for (p = poffset; p < poffset+npoints; p++) {
1549         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1550       }
1551     }
1552     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1553   }
1554   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1555   {
1556     /* Build a global process SF */
1557     PetscSFNode *remoteProc;
1558     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1559     for (p = 0; p < numProcs; ++p) {
1560       remoteProc[p].rank  = p;
1561       remoteProc[p].index = rank;
1562     }
1563     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1564     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1565     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1566   }
1567   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1568   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1569   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1570   /* Stratify the SF in case we are migrating an already parallel plex */
1571   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1572   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1573   sfMigration = sfStratified;
1574   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1575   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1576   if (flg) {
1577     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1578     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1579   }
1580 
1581   /* Create non-overlapping parallel DM and migrate internal data */
1582   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1583   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1584   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1585 
1586   /* Build the point SF without overlap */
1587   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1588   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1589   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1590   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1591   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1592 
1593   if (overlap > 0) {
1594     DM                 dmOverlap;
1595     PetscInt           nroots, nleaves;
1596     PetscSFNode       *newRemote;
1597     const PetscSFNode *oldRemote;
1598     PetscSF            sfOverlap, sfOverlapPoint;
1599     /* Add the partition overlap to the distributed DM */
1600     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1601     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1602     *dmParallel = dmOverlap;
1603     if (flg) {
1604       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1605       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1606     }
1607 
1608     /* Re-map the migration SF to establish the full migration pattern */
1609     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1610     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1611     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1612     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1613     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1614     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1615     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1616     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1617     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1618     sfMigration = sfOverlapPoint;
1619   }
1620   /* Cleanup Partition */
1621   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1622   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1623   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1624   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1625   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1626   /* Copy BC */
1627   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1628   /* Create sfNatural */
1629   if (dm->useNatural) {
1630     PetscSection section;
1631 
1632     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1633     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1634   }
1635   /* Cleanup */
1636   if (sf) {*sf = sfMigration;}
1637   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1638   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1639   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1640   PetscFunctionReturn(0);
1641 }
1642 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOverlap"
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Not Collective

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF; no ownership vote is needed since ownership is already established */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition; hand the overlap SF to the caller if requested */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1726 
1727 #undef __FUNCT__
1728 #define __FUNCT__ "DMPlexGetGatherDM"
1729 /*@C
1730   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1731   root process of the original's communicator.
1732 
1733   Input Parameters:
1734 . dm - the original DMPlex object
1735 
1736   Output Parameters:
1737 . gatherMesh - the gathered DM object, or NULL
1738 
1739   Level: intermediate
1740 
1741 .keywords: mesh
1742 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1743 @*/
1744 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1745 {
1746   MPI_Comm       comm;
1747   PetscMPIInt    size;
1748   PetscPartitioner oldPart, gatherPart;
1749   PetscErrorCode ierr;
1750 
1751   PetscFunctionBegin;
1752   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1753   comm = PetscObjectComm((PetscObject)dm);
1754   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1755   *gatherMesh = NULL;
1756   if (size == 1) PetscFunctionReturn(0);
1757   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1758   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1759   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1760   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1761   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1762   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1763   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1764   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1765   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1766   PetscFunctionReturn(0);
1767 }
1768 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetRedundantDM"
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Input Parameters:
. dm - the original DMPlex object

  Output Parameters:
. redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.keywords: mesh
.seealso: DMPlexDistribute(), DMPlexGetGatherDM()
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint;
  DM             gatherDM, dmCoord;
  PetscSFNode    *points;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  *redundantMesh = NULL;
  if (size == 1) PetscFunctionReturn(0);
  /* First gather the whole mesh onto the root process */
  ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
  if (!gatherDM) PetscFunctionReturn(0);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
  numPoints = pEnd - pStart;
  /* Broadcast the point count from the root, which holds the gathered mesh */
  ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
  ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
  /* Every rank receives every point from rank 0 */
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  /* Roots only exist on rank 0 (pEnd-pStart is 0 elsewhere); all ranks have numPoints leaves */
  ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
  /* Build the point SF without an ownership vote: rank 0 owns all points */
  ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
  ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1827