xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision bf0c4ff7f024e5604c03be42c12927a3985c36f7)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
/* Driver for adjacency computation: dispatches to the cone/support/transitive
   variant selected by the flags, optionally allocating adj[] and optionally
   replacing constrained (anchored) points by their anchor points.

   Input:  *adj may be NULL (an array is allocated for the caller to free);
           *adjSize < 0 requests the full allocated capacity.
   Output: *adjSize is the number of adjacent points stored in (*adj)[]. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): asiz is a static cache set only when this call allocates *adj.
     If a caller passes a preallocated *adj together with *adjSize < 0, the value
     used below is whatever a PREVIOUS call computed (or 0) — confirm callers
     never do that. */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    /* aSec maps anchored points to their anchors; absent when no constraints are set */
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Upper bound on adjacency size: geometric series over cone/support fan-out
       through all depth levels, scaled by the anchor fan-out, capped at the
       total number of mesh points */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency pattern (see DMPlexSetAdjacencyUseCone/UseClosure) */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    /* Replace each anchored point in the list with its anchor points, in place:
       shift the tail left to delete the anchored point, then append its anchors
       (deduplicated).  i is not advanced after a deletion so the element shifted
       into slot i is examined too.  Anchors appended past origSize are never
       re-examined, so anchors-of-anchors are not expanded. */
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Delete orig[i] by shifting the remaining entries down one slot */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Append each anchor unless already present (same idiom as the
             adjacency builders: the comma expression inserts on scan failure) */
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of leaf ranks per root point
. rootRanks       - IS of leaf ranks per root point, indexed by rootRankSection
. leafRankSection - Section giving the number of sharing ranks per leaf point
- leafRanks       - IS of sharing ranks per leaf point, indexed by leafRankSection

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;    /* one bit per rank: is that rank a neighbor? */
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 3);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  /* NOTE(review): PetscBTSet/PetscBTClear return values are ignored here and
     below, unlike the loops above — presumably harmless, but inconsistent */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges: a process is never its own neighbor */
  PetscBTClear(neighbors, rank);
  for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  /* One SF edge per neighbor: local "point" proc maps to (rank) on process proc */
  for(proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  /* Ownership of ranksNew passes to the IS if requested, otherwise free it here */
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
465 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the SF root degree is exactly
     the number of remote processes holding a leaf copy of that point */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every process broadcasts its own rank
     from its leaves, and roots collect one entry per incoming SF edge */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership passes to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves: push the per-root rank lists back down
     the SF so each leaf learns which other processes share its point */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
522 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;  /* adjacency workspace, allocated on first DMPlexGetAdjacency() call and reused */
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, numProcs;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point
     Mark the adjacency of every leaf point for the rank owning its root */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          /* Skip self: no need to send our own points to ourselves */
          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels by repeatedly expanding label adjacency */
  for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
  /* We require the closure in the overlap: only skip it when the adjacency
     pattern (FEM: !useCone && useClosure) already yields closed point sets */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make process SF and invert sender to receiver label */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points: leaves are re-labelled
     with their owning (root) rank instead of the local rank */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
637 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build a migration SF that combines all currently-owned local points with the
   incoming overlap points described by overlapSF, assigning the incoming
   points new local numbers that keep each depth stratum contiguous. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     record each local point's depth and broadcast it to the receiving leaves */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count recevied points in each stratum and compute the internal strata shift */
  /* NOTE(review): if any leaf were not reached by the broadcast, remoteDepths[p]
     would stay -1 and depthRecv[-1]++ below would write out of bounds — this
     presumably relies on overlapSF covering every leaf; confirm that invariant */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Shifts appear to follow the Plex stratum numbering order (cells, vertices,
     then faces/edges) so incoming points extend each stratum contiguously —
     TODO confirm against the Plex point-numbering convention */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so points that were
     leaves in the original point SF keep pointing at their true owners */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points: depthIdx[d] advanced past the local
     points of stratum d above, so new points append at the stratum's end */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* ilocal/iremote ownership passes to the SF */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "DMPlexStratifyMigrationSF"
737 /*@
738   DMPlexStratifyMigrationSF - Add partition overlap to a distributed non-overlapping DM.
739 
740   Input Parameter:
741 + dm          - The DM
742 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
743 
744   Output Parameter:
745 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
746 
747   Level: developer
748 
749 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
750 @*/
751 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
752 {
753   MPI_Comm           comm;
754   PetscMPIInt        rank, numProcs;
755   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
756   PetscInt          *pointDepths, *remoteDepths, *ilocal;
757   PetscInt          *depthRecv, *depthShift, *depthIdx;
758   const PetscSFNode *iremote;
759   PetscErrorCode     ierr;
760 
761   PetscFunctionBegin;
762   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
763   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
764   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
765   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
766   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
767   ierr = MPI_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
768   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
769 
770   /* Before building the migration SF we need to know the new stratum offsets */
771   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
772   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
773   for (d = 0; d < depth+1; ++d) {
774     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
775     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
776   }
777   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
778   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
779   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
780   /* Count recevied points in each stratum and compute the internal strata shift */
781   ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
782   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
783   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
784   depthShift[depth] = 0;
785   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
786   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
787   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
788   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
789   /* Derive a new local permutation based on stratified indices */
790   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
791   for (p = 0; p < nleaves; ++p) {
792     const PetscInt dep = remoteDepths[p];
793 
794     ilocal[p] = depthShift[dep] + depthIdx[dep];
795     depthIdx[dep]++;
796   }
797   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
798   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
799   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
800   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
801   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
802   PetscFunctionReturn(0);
803 }
804 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Build the receiving-side data layout in newSection */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Size the output vector to hold the redistributed dofs */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  /* Broadcast dof values over a section-aware SF derived from the point SF */
  /* NOTE(review): remoteOffsets is never freed here -- confirm whether
     PetscSFCreateSectionSF() takes ownership of it */
  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
852 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeFieldIS"
/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Build the receiving-side data layout in newSection */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  /* Broadcast index values over a section-aware SF derived from the point SF */
  /* NOTE(review): remoteOffsets is never freed here -- confirm whether
     PetscSFCreateSectionSF() takes ownership of it */
  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  /* newIS takes ownership of the newValues array */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
898 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeData"
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  /* Build the receiving-side data layout in newSection */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Allocate the output buffer; the caller owns and must free *newData */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  /* Broadcast raw data over a section-aware SF derived from the point SF */
  /* NOTE(review): remoteOffsets is never freed here -- confirm whether
     PetscSFCreateSectionSF() takes ownership of it */
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
943 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCones"
/* Distribute the cone section, cones, and cone orientations of dm onto
   dmParallel over the given migration SF.  Received cone points (which arrive
   in the old global numbering) are mapped back to new local numbers via
   'renumbering'; the parallel mesh is then symmetrized and stratified. */
PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *mesh  = (DM_Plex*) dm->data;
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,4);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);

  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Recompute the maximum cone size for the new local mesh */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Map received global cone points to local numbers in place; points missing
     from the map come back negative (IS_GTOLM_MASK) */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if PETSC_USE_DEBUG
  {
    /* In debug mode, verify that every cone point was present in the overlap SF */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations travel over the same section SF; they need no renumbering */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  /* Propagate the adjacency flags to the new mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}
1023 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
/* Distribute the local coordinate section and vector of dm onto dmParallel
   over the given migration SF, and copy over any periodicity information. */
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Preserve the name of the original coordinate vector on the new one */
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Carry over the block size (coordinate dimension) */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Propagate periodicity; only set when box lengths L were provided */
  ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1060 
1061 #undef __FUNCT__
1062 #define __FUNCT__ "DMPlexDistributeLabels"
1063 /* Here we are assuming that process 0 always has everything */
1064 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1065 {
1066   MPI_Comm       comm;
1067   PetscMPIInt    rank;
1068   PetscInt       numLabels, numLocalLabels, l;
1069   PetscBool      hasLabels = PETSC_FALSE;
1070   PetscErrorCode ierr;
1071 
1072   PetscFunctionBegin;
1073   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1074   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1075   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1076   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1077   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1078 
1079   /* Everyone must have either the same number of labels, or none */
1080   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1081   numLabels = numLocalLabels;
1082   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1083   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1084   for (l = numLabels-1; l >= 0; --l) {
1085     DMLabel     label = NULL, labelNew = NULL;
1086     PetscBool   isdepth;
1087 
1088     if (hasLabels) {
1089       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1090       /* Skip "depth" because it is recreated */
1091       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1092     }
1093     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1094     if (isdepth) continue;
1095     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1096     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1097   }
1098   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
/* Transfer the hybrid point bounds (hybridPointMax) from dm to dmParallel.

   The serial bounds and stratum extents are broadcast from rank 0 (assumed to
   have the whole mesh); each rank counts how many of its new global points lie
   at or beyond the old hybrid threshold in each stratum and places the new
   threshold that many points before the end of its local stratum.
   NOTE(review): migrationSF is part of the interface but unused here --
   presumably kept for signature symmetry with the other Distribute helpers. */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue; /* no hybrid points in this stratum */
    /* Serial stratum bounds come from rank 0; an interpolated dm may have d > depth */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points whose global number falls in the serial hybrid range */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1144 
1145 #undef __FUNCT__
1146 #define __FUNCT__ "DMPlexDistributeSetupTree"
1147 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1148 {
1149   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1150   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1151   MPI_Comm        comm;
1152   DM              refTree;
1153   PetscSection    origParentSection, newParentSection;
1154   PetscInt        *origParents, *origChildIDs;
1155   PetscBool       flg;
1156   PetscErrorCode  ierr;
1157 
1158   PetscFunctionBegin;
1159   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1160   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1161   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1162 
1163   /* Set up tree */
1164   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1165   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1166   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1167   if (origParentSection) {
1168     PetscInt        pStart, pEnd;
1169     PetscInt        *newParents, *newChildIDs;
1170     PetscInt        *remoteOffsetsParents, newParentSize;
1171     PetscSF         parentSF;
1172 
1173     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1174     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1175     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1176     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1177     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1178     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1179     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1180     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1181     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1182     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1183     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1184     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1185 #if PETSC_USE_DEBUG
1186     {
1187       PetscInt  p;
1188       PetscBool valid = PETSC_TRUE;
1189       for (p = 0; p < newParentSize; ++p) {
1190         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1191       }
1192       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1193     }
1194 #endif
1195     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1196     if (flg) {
1197       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1198       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1199       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1200       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1201       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1202     }
1203     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1204     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1205     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1206     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1207   }
1208   pmesh->useAnchors = mesh->useAnchors;
1209   PetscFunctionReturn(0);
1210 }
1211 
1212 #undef __FUNCT__
1213 #define __FUNCT__ "DMPlexDistributeSF"
1214 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1215 {
1216   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1217   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1218   PetscMPIInt            rank, numProcs;
1219   MPI_Comm               comm;
1220   PetscErrorCode         ierr;
1221 
1222   PetscFunctionBegin;
1223   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1224   PetscValidPointer(dmParallel,7);
1225 
1226   /* Create point SF for parallel mesh */
1227   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1228   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1229   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1230   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1231   {
1232     const PetscInt *leaves;
1233     PetscSFNode    *remotePoints, *rowners, *lowners;
1234     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1235     PetscInt        pStart, pEnd;
1236 
1237     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1238     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1239     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1240     for (p=0; p<numRoots; p++) {
1241       rowners[p].rank  = -1;
1242       rowners[p].index = -1;
1243     }
1244     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1245     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1246     for (p = 0; p < numLeaves; ++p) {
1247       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1248         lowners[p].rank  = rank;
1249         lowners[p].index = leaves ? leaves[p] : p;
1250       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1251         lowners[p].rank  = -2;
1252         lowners[p].index = -2;
1253       }
1254     }
1255     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1256       rowners[p].rank  = -3;
1257       rowners[p].index = -3;
1258     }
1259     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1260     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1261     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1262     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1263     for (p = 0; p < numLeaves; ++p) {
1264       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1265       if (lowners[p].rank != rank) ++numGhostPoints;
1266     }
1267     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1268     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1269     for (p = 0, gp = 0; p < numLeaves; ++p) {
1270       if (lowners[p].rank != rank) {
1271         ghostPoints[gp]        = leaves ? leaves[p] : p;
1272         remotePoints[gp].rank  = lowners[p].rank;
1273         remotePoints[gp].index = lowners[p].index;
1274         ++gp;
1275       }
1276     }
1277     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1278     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1279     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1280   }
1281   pmesh->useCone    = mesh->useCone;
1282   pmesh->useClosure = mesh->useClosure;
1283   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1284   PetscFunctionReturn(0);
1285 }
1286 
1287 #undef __FUNCT__
1288 #define __FUNCT__ "DMPlexCreatePointSF"
1289 /*@C
1290   DMPlexDerivePointSF - Build a point SF from an SF describing a point migration
1291 
1292   Input Parameter:
1293 + dm          - The source DMPlex object
1294 . migrationSF - The star forest that describes the parallel point remapping
1295 . ownership   - Flag causing a vote to determine point ownership
1296 
1297   Output Parameter:
1298 - pointSF     - The star forest describing the point overlap in the remapped DM
1299 
1300   Level: developer
1301 
1302 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1303 @*/
1304 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1305 {
1306   PetscMPIInt        rank;
1307   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1308   PetscInt          *pointLocal;
1309   const PetscInt    *leaves;
1310   const PetscSFNode *roots;
1311   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1312   PetscErrorCode     ierr;
1313 
1314   PetscFunctionBegin;
1315   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1316   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1317 
1318   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1319   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1320   if (ownership) {
1321     /* Point ownership vote: Process with highest rank ownes shared points */
1322     for (p = 0; p < nleaves; ++p) {
1323       /* Either put in a bid or we know we own it */
1324       leafNodes[p].rank  = rank;
1325       leafNodes[p].index = p;
1326     }
1327     for (p = 0; p < nroots; p++) {
1328       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1329       rootNodes[p].rank  = -3;
1330       rootNodes[p].index = -3;
1331     }
1332     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1333     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1334   } else {
1335     for (p = 0; p < nroots; p++) {
1336       rootNodes[p].index = -1;
1337       rootNodes[p].rank = rank;
1338     };
1339     for (p = 0; p < nleaves; p++) {
1340       /* Write new local id into old location */
1341       if (roots[p].rank == rank) {
1342         rootNodes[roots[p].index].index = leaves[p];
1343       }
1344     }
1345   }
1346   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1347   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1348 
1349   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1350   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1351   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1352   for (idx = 0, p = 0; p < nleaves; p++) {
1353     if (leafNodes[p].rank != rank) {
1354       pointLocal[idx] = p;
1355       pointRemote[idx] = leafNodes[p];
1356       idx++;
1357     }
1358   }
1359   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1360   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1361   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1362   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 #undef __FUNCT__
1367 #define __FUNCT__ "DMPlexMigrate"
1368 /*@C
1369   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1370 
1371   Input Parameters:
1372 + dm       - The source DMPlex object
1373 - sf       - The star forest communication context describing the migration pattern
1374 
1375   Output Parameter:
1376 . targetDM - The target DMPlex object
1377 
1378   Level: intermediate
1379 
1380 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1381 @*/
1382 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1383 {
1384   MPI_Comm               comm;
1385   PetscInt               dim, nroots;
1386   PetscSF                sfPoint;
1387   ISLocalToGlobalMapping ltogMigration;
1388   PetscBool              flg;
1389   PetscErrorCode         ierr;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1393   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1394   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1395   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1396   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1397 
1398   /* Check for a one-to-all distribution pattern: a source DM with a valid
1399      point SF is already parallel, so we must derive the migration
1400      local-to-global mapping from its existing global point numbering */
1401   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1402   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1403   if (nroots >= 0) {
1404     IS                     isOriginal;
1405     ISLocalToGlobalMapping ltogOriginal;
1406     PetscInt               n, size, nleaves, conesSize;
1407     PetscInt              *numbering_orig, *numbering_new, *cones;
1408     PetscSection           coneSection;
1409     /* Get the original point numbering */
1410     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1411     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1412     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1413     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1414     /* Convert to positive global numbers: point numberings encode remote
1415        (ghost) points as -(global+1), so flip them back */
1415     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1416     /* Derive the new local-to-global mapping from the old one by
1417        broadcasting the old global numbers along the migration SF */
1418     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1419     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1420     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1421     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1422     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1423     /* Restore must match the Get: GetBlockIndices pairs with RestoreBlockIndices */
1424     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1425     /* Convert cones to global numbering before migrating them */
1426     ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1427     ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1428     ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1429     ierr = ISLocalToGlobalMappingApplyBlock(ltogOriginal, conesSize, cones, cones);CHKERRQ(ierr);
1430     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1431     ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1432   } else {
1433     /* One-to-all distribution pattern: We can derive LToG from SF */
1434     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1435   }
1436   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1437   if (flg) {
1438     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1439     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1440   }
1441   /* Migrate DM data to target DM: cones, coordinates, labels, hybrid
1442      bounds, and tree (constraint) information, in that order */
1443   ierr = DMPlexDistributeCones(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1444   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1445   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1446   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1447   ierr = DMPlexDistributeSetupTree(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1448   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1449   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1450   PetscFunctionReturn(0);
1451 }
1447 
1448 #undef __FUNCT__
1449 #define __FUNCT__ "DMPlexDistribute"
1450 /*@C
1451   DMPlexDistribute - Distributes the mesh and any associated sections.
1452 
1453   Collective on dm
1454 
1455   Input Parameters:
1456 + dm  - The original DMPlex object
1457 - overlap - The overlap of partitions, 0 is the default
1458 
1459   Output Parameters:
1460 + sf - The PetscSF used for point distribution
1461 - dmParallel - The distributed DMPlex object, or NULL
1462 
1463   Note: If the mesh was not distributed, the return value is NULL.
1464 
1465   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1466   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1467   representation on the mesh.
1468 
1469   Level: intermediate
1470 
1471 .keywords: mesh, elements
1472 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1473 @*/
1474 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1475 {
1476   MPI_Comm               comm;
1477   PetscPartitioner       partitioner;
1478   IS                     cellPart;
1479   PetscSection           cellPartSection;
1480   DM                     dmCoord;
1481   DMLabel                lblPartition, lblMigration;
1482   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1483   PetscBool              flg;
1484   PetscMPIInt            rank, numProcs, p;
1485   PetscErrorCode         ierr;
1486 
1487   PetscFunctionBegin;
1488   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1489   if (sf) PetscValidPointer(sf,4);
1490   PetscValidPointer(dmParallel,5);
1491 
1492   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1493   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1494   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1495   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1496 
1497   /* Serial runs have nothing to distribute */
1498   *dmParallel = NULL;
1499   if (numProcs == 1) PetscFunctionReturn(0);
1500 
1501   /* Create cell partition */
1502   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1503   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1504   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1505   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1506   {
1507     /* Convert partition (section + IS) to a DMLabel: label value = target rank.
1508        Use a PetscInt loop variable for points; point counts can exceed the
1509        PetscMPIInt (int) range on 64-bit-index builds */
1510     PetscInt proc, pStart, pEnd, npoints, poffset, pt;
1511     const PetscInt *points;
1512     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1513     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1514     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1515     for (proc = pStart; proc < pEnd; proc++) {
1516       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1517       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1518       for (pt = poffset; pt < poffset+npoints; pt++) {
1519         ierr = DMLabelSetValue(lblPartition, points[pt], proc);CHKERRQ(ierr);
1520       }
1521     }
1522     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1523   }
1524   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1525   {
1526     /* Build a global process SF: every rank is a leaf of every other rank,
1527        used to invert the partition label across processes */
1528     PetscSFNode *remoteProc;
1529     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1530     for (p = 0; p < numProcs; ++p) {
1531       remoteProc[p].rank  = p;
1532       remoteProc[p].index = rank;
1533     }
1534     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1535     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1536     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1537   }
1538   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1539   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1540   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1541   /* Stratify the SF in case we are migrating an already parallel plex */
1542   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1543   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1544   sfMigration = sfStratified;
1545   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1546   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1547   if (flg) {
1548     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1549     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1550   }
1551 
1552   /* Create non-overlapping parallel DM and migrate internal data */
1553   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1554   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1555   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1556 
1557   /* Build the point SF without overlap */
1558   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1559   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1560   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1561   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1562   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1563 
1564   if (overlap > 0) {
1565     DM                 dmOverlap;
1566     PetscInt           nroots, nleaves;
1567     PetscSFNode       *newRemote;
1568     const PetscSFNode *oldRemote;
1569     PetscSF            sfOverlap, sfOverlapPoint;
1570     /* Add the partition overlap to the distributed DM */
1571     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1572     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1573     *dmParallel = dmOverlap;
1574     if (flg) {
1575       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1576       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1577     }
1578 
1579     /* Re-map the migration SF to establish the full migration pattern:
1580        compose the original migration with the overlap migration */
1581     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1582     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1583     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1584     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1585     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1586     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1587     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1588     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1589     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1590     sfMigration = sfOverlapPoint;
1591   }
1592   /* Cleanup Partition */
1593   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1594   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1595   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1596   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1597   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1598   /* Copy BC */
1599   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1600   /* Create sfNatural */
1601   if (dm->useNatural) {
1602     PetscSection section;
1603 
1604     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1605     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1606   }
1607   /* Cleanup: hand the migration SF to the caller if requested, else destroy */
1608   if (sf) {*sf = sfMigration;}
1609   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1610   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1611   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1609 
1610 #undef __FUNCT__
1611 #define __FUNCT__ "DMPlexDistributeOverlap"
1612 /*@C
1613   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1614 
1615   Collective on dm
1616 
1617   Input Parameters:
1618 + dm  - The non-overlapping distributed DMPlex object
1619 - overlap - The overlap of partitions, 0 is the default
1620 
1621   Output Parameters:
1622 + sf - The PetscSF used for point distribution
1623 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1624 
1625   Note: If the mesh was not distributed, the return value is NULL.
1626 
1627   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1628   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1629   representation on the mesh.
1630 
1631   Level: intermediate
1632 
1633 .keywords: mesh, elements
1634 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1635 @*/
1636 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1637 {
1638   MPI_Comm               comm;
1639   PetscMPIInt            rank;         /* NOTE(review): set below but apparently unused in this body — confirm before removing */
1640   PetscSection           rootSection, leafSection;
1641   IS                     rootrank, leafrank;
1642   DM                     dmCoord;
1643   DMLabel                lblOverlap;
1644   PetscSF                sfOverlap, sfStratified, sfPoint;
1645   PetscErrorCode         ierr;
1646 
1647   PetscFunctionBegin;
1648   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1649   if (sf) PetscValidPointer(sf, 3);
1650   PetscValidPointer(dmOverlap, 4);
1651 
1652   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1653   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1654   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1655 
1656   /* Compute point overlap with neighbouring processes on the distributed DM:
1657      first determine point ownership, then build the overlap label of points
1658      that must be replicated onto each neighbouring rank */
1659   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1660   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1661   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1662   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1663   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1664   /* Convert overlap label to stratified migration SF */
1665   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1666   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1667   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1668   sfOverlap = sfStratified;
1669   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1670   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1671 
1672   /* Ownership sections and rank ISs are only needed to build the label */
1673   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1674   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1675   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1676   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1677   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1678 
1679   /* Build the overlapping DM by migrating the original over the overlap SF */
1680   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1681   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1682   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1683   /* Build the new point SF (PETSC_FALSE: the source DM is already parallel) */
1684   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1685   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1686   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1687   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1688   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1689   /* Cleanup overlap partition */
1690   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1691   /* Hand the overlap SF to the caller if requested, else destroy it */
1692   if (sf) *sf = sfOverlap;
1693   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1694   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1695   PetscFunctionReturn(0);
1696 }
1693