xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 9837ea968140067d7f638ef40a3e6ee2b94657e5)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c < coneSize; ++c) {
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s < supportSize; ++s) {
211     const PetscInt *cone = NULL;
212     PetscInt        coneSize, c, q;
213 
214     ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
215     ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
216     for (c = 0; c < coneSize; ++c) {
217       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
218         if (cone[c] == adj[q]) break;
219       }
220       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
221     }
222   }
223   *adjSize = numAdj;
224   PetscFunctionReturn(0);
225 }
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
229 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
230 {
231   PetscInt      *star = NULL;
232   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
233   PetscErrorCode ierr;
234 
235   PetscFunctionBeginHot;
236   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
237   for (s = 0; s < starSize*2; s += 2) {
238     const PetscInt *closure = NULL;
239     PetscInt        closureSize, c, q;
240 
241     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
242     for (c = 0; c < closureSize*2; c += 2) {
243       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
244         if (closure[c] == adj[q]) break;
245       }
246       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
247     }
248     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
249   }
250   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
251   *adjSize = numAdj;
252   PetscFunctionReturn(0);
253 }
254 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
/* Dispatch to the cone/support/transitive adjacency kernels for point p, allocating
   *adj on first use and optionally replacing constrained (anchored) points by their
   anchors.  On output *adjSize is the number of points in (*adj)[].
   NOTE(review): asiz is function-static, so the size computed for the first
   lazily-allocated array is reused for later calls that pass *adjSize < 0 with a
   caller-provided array; this also makes the function not thread-safe — confirm
   callers always come through the same allocation path. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;               /* cached worst-case adjacency size from the last allocation */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    /* Size the array for the worst case: a geometric series in the maximum
       cone/support sizes bounds the transitive-closure fan-out, then scale by the
       maximum number of anchors and clamp to the chart size */
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Select the adjacency pattern requested by the flags (see DMPlexSetAdjacencyUseCone/UseClosure) */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the list by its anchor points, in place:
       remove the point by shifting the tail left, then append any anchors not
       already present.  Appended anchors land past origSize, so they are never
       themselves re-examined for anchors. */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Shift the remainder of the list down over the removed point */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Append anchors[aOff+s] unless it is already in the list (comma-operator append) */
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
339 
340 #undef __FUNCT__
341 #define __FUNCT__ "DMPlexGetAdjacency"
342 /*@
343   DMPlexGetAdjacency - Return all points adjacent to the given point
344 
345   Input Parameters:
346 + dm - The DM object
347 . p  - The point
348 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
349 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
350 
351   Output Parameters:
352 + adjSize - The number of adjacent points
353 - adj - The adjacent points
354 
355   Level: advanced
356 
357   Notes: The user must PetscFree the adj array if it was not passed in.
358 
359 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
360 @*/
361 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
362 {
363   DM_Plex       *mesh = (DM_Plex *) dm->data;
364   PetscErrorCode ierr;
365 
366   PetscFunctionBeginHot;
367   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
368   PetscValidPointer(adjSize,3);
369   PetscValidPointer(adj,4);
370   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
371   PetscFunctionReturn(0);
372 }
373 #undef __FUNCT__
374 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
375 /*@
376   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
377 
378   Collective on DM
379 
380   Input Parameters:
381 + dm      - The DM
382 - sfPoint - The PetscSF which encodes point connectivity
383 
384   Output Parameters:
385 + processRanks - A list of process neighbors, or NULL
386 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
387 
388   Level: developer
389 
390 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
391 @*/
392 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
393 {
394   const PetscSFNode *remotePoints;
395   PetscInt          *localPointsNew;
396   PetscSFNode       *remotePointsNew;
397   const PetscInt    *nranks;
398   PetscInt          *ranksNew;
399   PetscBT            neighbors;
400   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
401   PetscMPIInt        numProcs, proc, rank;
402   PetscErrorCode     ierr;
403 
404   PetscFunctionBegin;
405   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
406   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
407   if (processRanks) {PetscValidPointer(processRanks, 3);}
408   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
409   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
410   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
411   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
412   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
413   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
414   /* Compute root-to-leaf process connectivity */
415   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
416   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
417   for (p = pStart; p < pEnd; ++p) {
418     PetscInt ndof, noff, n;
419 
420     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
421     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
422     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
423   }
424   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
425   /* Compute leaf-to-neighbor process connectivity */
426   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
427   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
428   for (p = pStart; p < pEnd; ++p) {
429     PetscInt ndof, noff, n;
430 
431     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
432     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
433     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
434   }
435   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
436   /* Compute leaf-to-root process connectivity */
437   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
438   /* Calculate edges */
439   PetscBTClear(neighbors, rank);
440   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
441   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
442   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
444   for(proc = 0, n = 0; proc < numProcs; ++proc) {
445     if (PetscBTLookup(neighbors, proc)) {
446       ranksNew[n]              = proc;
447       localPointsNew[n]        = proc;
448       remotePointsNew[n].index = rank;
449       remotePointsNew[n].rank  = proc;
450       ++n;
451     }
452   }
453   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
454   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
455   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
456   if (sfProcess) {
457     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
458     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
459     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
460     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
461   }
462   PetscFunctionReturn(0);
463 }
464 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  /* rootSection gets one dof per leaf attached to each root point */
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every process contributes its own rank
     for each of its leaves, so roots learn who shares them */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership transfers to the IS */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
521 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Build an SF describing the extra (overlap) points each process should
  receive, by collecting the adjacency of all shared points and exchanging it with the
  neighboring processes.

  Collective on DM

  Input Parameters:
+ dm          - The DM
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. overlapSF - An SF whose leaves are the overlap points this process receives and whose
              roots are those points on their owning processes

  Level: developer

.seealso: DMPlexDistributeOwnership()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  IS                 valueIS;
  DMLabel            ovLeafLabel;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank, *neighbors;
  PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
  PetscSection       ovRootSection, ovLeafSection;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
  PetscInt           idx, numRemote;
  PetscMPIInt        rank, numProcs;
  PetscBool          flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point; each leaf's adjacency is owed to the root's rank */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  /* Count the number of points going to each neighboring rank */
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  /* Record each point as (index, rank) owned by its true owner: a point that is a
     local leaf is named by its remote root, otherwise by (point, this rank) */
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
      if (l >= 0) {ovRootPoints[off+p] = remote[l];}
      else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  /* Make process SF */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
  for (p = 0; p < ovSize; p++) {
    /* Don't import points from yourself */
    if (ovLeafPoints[p].rank == rank) continue;
    ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
  }
  /* Don't import points already in the pointSF */
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Count surviving remote points, stratum (= sending rank) by stratum */
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    PetscInt numPoints;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS remoteRootIS;
    PetscInt numPoints;
    const PetscInt *remoteRoots;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
    ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
  ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
718 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* Build a migration SF that maps the current local mesh plus the incoming overlap
   points (described by overlapSF) into a new overlapping local mesh, keeping each
   depth stratum contiguous in the new local numbering. The result is returned in
   migrationSF with local points mapped to themselves and received overlap points
   appended at the end of their respective strata. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     tag each local point with its depth and communicate depths for the incoming
     overlap leaves so we know which stratum each received point belongs to. */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift.
     NOTE(review): the shift accumulation below assumes the DMPlex stratum ordering
     (cells, vertices, faces, edges interleaved in chart order) — confirm against
     DMPlexStratify conventions before altering. */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: all current local
     points plus the incoming overlap leaves. */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc2(newLeaves, &ilocal, newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves (shifted to their new stratum offsets) */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, so already-shared
     points keep pointing at their true owners rather than at ourselves */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended per stratum via depthIdx */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "DMPlexDistributeField"
817 /*@
818   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
819 
820   Collective on DM
821 
822   Input Parameters:
823 + dm - The DMPlex object
824 . pointSF - The PetscSF describing the communication pattern
825 . originalSection - The PetscSection for existing data layout
826 - originalVec - The existing data
827 
828   Output Parameters:
829 + newSection - The PetscSF describing the new data layout
830 - newVec - The new data
831 
832   Level: developer
833 
834 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
835 @*/
836 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
837 {
838   PetscSF        fieldSF;
839   PetscInt      *remoteOffsets, fieldSize;
840   PetscScalar   *originalValues, *newValues;
841   PetscErrorCode ierr;
842 
843   PetscFunctionBegin;
844   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
845   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
846 
847   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
848   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
849   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
850 
851   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
852   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
853   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
854   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
855   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
856   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
857   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
858   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
859   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "DMPlexDistributeFieldIS"
865 /*@
866   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
867 
868   Collective on DM
869 
870   Input Parameters:
871 + dm - The DMPlex object
872 . pointSF - The PetscSF describing the communication pattern
873 . originalSection - The PetscSection for existing data layout
874 - originalIS - The existing data
875 
876   Output Parameters:
877 + newSection - The PetscSF describing the new data layout
878 - newIS - The new data
879 
880   Level: developer
881 
882 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
883 @*/
884 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
885 {
886   PetscSF         fieldSF;
887   PetscInt       *newValues, *remoteOffsets, fieldSize;
888   const PetscInt *originalValues;
889   PetscErrorCode  ierr;
890 
891   PetscFunctionBegin;
892   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
893   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
894 
895   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
896   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
897 
898   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
899   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
900   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
901   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
902   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
903   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
904   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
905   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
906   PetscFunctionReturn(0);
907 }
908 
909 #undef __FUNCT__
910 #define __FUNCT__ "DMPlexDistributeData"
911 /*@
912   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
913 
914   Collective on DM
915 
916   Input Parameters:
917 + dm - The DMPlex object
918 . pointSF - The PetscSF describing the communication pattern
919 . originalSection - The PetscSection for existing data layout
920 . datatype - The type of data
921 - originalData - The existing data
922 
923   Output Parameters:
924 + newSection - The PetscSection describing the new data layout
925 - newData - The new data
926 
927   Level: developer
928 
929 .seealso: DMPlexDistribute(), DMPlexDistributeField()
930 @*/
931 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
932 {
933   PetscSF        fieldSF;
934   PetscInt      *remoteOffsets, fieldSize;
935   PetscMPIInt    dataSize;
936   PetscErrorCode ierr;
937 
938   PetscFunctionBegin;
939   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
940   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
941 
942   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
943   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
944   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
945 
946   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
947   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
948   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
949   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
950   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 #undef __FUNCT__
955 #define __FUNCT__ "DMPlexDistributeCones"
956 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
957 {
958   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
959   MPI_Comm               comm;
960   PetscSF                coneSF;
961   PetscSection           originalConeSection, newConeSection;
962   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
963   PetscBool              flg;
964   PetscErrorCode         ierr;
965 
966   PetscFunctionBegin;
967   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
968   PetscValidPointer(dmParallel,4);
969   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
970 
971   /* Distribute cone section */
972   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
973   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
974   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
975   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
976   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
977   {
978     PetscInt pStart, pEnd, p;
979 
980     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
981     for (p = pStart; p < pEnd; ++p) {
982       PetscInt coneSize;
983       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
984       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
985     }
986   }
987   /* Communicate and renumber cones */
988   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
989   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
990   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
991   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
992   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
993   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
994   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
995   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
996   if (flg) {
997     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
998     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
999     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1000     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1001     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1002   }
1003   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1004   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1005   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1006   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1007   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1008   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1009   /* Create supports and stratify sieve */
1010   {
1011     PetscInt pStart, pEnd;
1012 
1013     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1014     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1015   }
1016   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1017   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1018   PetscFunctionReturn(0);
1019 }
1020 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
/* Distribute the coordinate section and local coordinate vector of dm to
   dmParallel over the migration SF, preserving the vector name and block size,
   and carry over periodicity information if present. */
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  /* A DM may legitimately have no local coordinates yet; skip distribution then */
  if (originalCoordinates) {
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    /* dmParallel takes its own reference, so we can drop ours below */
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Propagate periodicity (domain lengths L and max cell extents) if defined */
  ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1056 
1057 #undef __FUNCT__
1058 #define __FUNCT__ "DMPlexDistributeLabels"
1059 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1060 {
1061   MPI_Comm       comm;
1062   PetscMPIInt    rank;
1063   PetscInt       numLabels, l;
1064   PetscErrorCode ierr;
1065 
1066   PetscFunctionBegin;
1067   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1068   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1069   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1070   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1071   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1072 
1073   /* Bcast number of labels */
1074   ierr = DMPlexGetNumLabels(dm, &numLabels);CHKERRQ(ierr);
1075   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1076   for (l = numLabels-1; l >= 0; --l) {
1077     DMLabel     label = NULL, labelNew = NULL;
1078     PetscBool   isdepth;
1079 
1080     if (!rank) {
1081       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1082       /* Skip "depth" because it is recreated */
1083       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1084     }
1085     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1086     if (isdepth) continue;
1087     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1088     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1089   }
1090   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1091   PetscFunctionReturn(0);
1092 }
1093 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
/* Recompute the hybrid-point boundaries (hybridPointMax per dimension) of
   dmParallel after distribution. The serial hybrid boundaries from rank 0 are
   broadcast, then each rank counts how many of its global points fall into the
   serial hybrid range to locate the new local boundary. */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure: start from rank 0's serial values on every rank */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue; /* no hybrid points in this dimension */
    /* Serial stratum bounds (clamped to the actual depth for interpolated meshes) */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points whose serial (global) number lies in the hybrid tail */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    /* Hybrid points are numbered last in their stratum on the parallel mesh */
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "DMPlexDistributeSetupTree"
1139 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1140 {
1141   MPI_Comm        comm;
1142   DM              refTree;
1143   PetscSection    origParentSection, newParentSection;
1144   PetscInt        *origParents, *origChildIDs;
1145   PetscBool       flg;
1146   PetscErrorCode  ierr;
1147 
1148   PetscFunctionBegin;
1149   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1150   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1151   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1152 
1153   /* Set up tree */
1154   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1155   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1156   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1157   if (origParentSection) {
1158     PetscInt        pStart, pEnd;
1159     PetscInt        *newParents, *newChildIDs;
1160     PetscInt        *remoteOffsetsParents, newParentSize;
1161     PetscSF         parentSF;
1162 
1163     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1164     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1165     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1166     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1167     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1168     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1169     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1170     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1171     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1172     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1173     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1174     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1175     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1176     if (flg) {
1177       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1178       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1179       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1180       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1181       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1182     }
1183     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1184     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1185     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1186     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1187   }
1188   PetscFunctionReturn(0);
1189 }
1190 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSF"
/* Build the point SF of dmParallel, which records which local points are ghosts
   of points owned by other ranks. Ownership is negotiated over the migration SF:
   each rank bids for its leaves, a MAXLOC reduction picks a unique owner per
   root, and the result is broadcast back so each rank can identify its ghosts. */
PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
{
  DM_Plex               *mesh  = (DM_Plex*) dm->data;
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscMPIInt            rank, numProcs;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,7);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* rank/index == -1 means "unclaimed" */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
    }
    /* Push current (pre-assigned or unclaimed) ownership out to the leaves */
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* Resolve bids with MAXLOC (highest bidding rank wins), then broadcast the winners */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    /* Every leaf must now have a valid owner; points owned elsewhere are our ghosts */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints,    &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    /* The SF takes ownership of ghostPoints and remotePoints */
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Carry the adjacency definition over to the parallel mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "DMPlexDistribute"
1284 /*@C
1285   DMPlexDistribute - Distributes the mesh and any associated sections.
1286 
  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
1295 + sf - The PetscSF used for point distribution
1296 - parallelMesh - The distributed DMPlex object, or NULL
1297 
1298   Note: If the mesh was not distributed, the return value is NULL.
1299 
1300   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1301   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1302   representation on the mesh.
1303 
1304   Level: intermediate
1305 
1306 .keywords: mesh, elements
1307 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1308 @*/
1309 PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
1310 {
1311   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1312   MPI_Comm               comm;
1313   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1314   DM                     dmOverlap;
1315   IS                     cellPart,        part;
1316   PetscSection           cellPartSection, partSection;
1317   PetscSFNode           *remoteRanks, *newRemote;
1318   const PetscSFNode     *oldRemote;
1319   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1320   ISLocalToGlobalMapping renumbering;
1321   PetscBool              flg;
1322   PetscMPIInt            rank, numProcs, p;
1323   PetscErrorCode         ierr;
1324 
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1327   if (sf) PetscValidPointer(sf,4);
1328   PetscValidPointer(dmParallel,5);
1329 
1330   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1331   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1332   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1333   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1334 
1335   *dmParallel = NULL;
1336   if (numProcs == 1) PetscFunctionReturn(0);
1337 
1338   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1339   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1340   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1341   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1342   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1343   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1344   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1345   if (!rank) numRemoteRanks = numProcs;
1346   else       numRemoteRanks = 0;
1347   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1348   for (p = 0; p < numRemoteRanks; ++p) {
1349     remoteRanks[p].rank  = p;
1350     remoteRanks[p].index = 0;
1351   }
1352   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1353   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1354   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1355   if (flg) {
1356     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1357     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1358     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1359     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1360   }
1361   /* Close the partition over the mesh */
1362   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1363   /* Create new mesh */
1364   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1365   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1366   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1367   pmesh = (DM_Plex*) (*dmParallel)->data;
1368   pmesh->useAnchors = mesh->useAnchors;
1369 
1370   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1371   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1372   if (flg) {
1373     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1374     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1375     ierr = ISView(part, NULL);CHKERRQ(ierr);
1376     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1377     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1378     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1379   }
1380   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1381 
1382   /* Migrate data to a non-overlapping parallel DM */
1383   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1384   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1385   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1386   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1387   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1388 
1389   /* Build the point SF without overlap */
1390   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1391   if (flg) {
1392     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1393     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1394   }
1395 
1396   if (overlap > 0) {
1397     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1398     /* Add the partition overlap to the distributed DM */
1399     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1400     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1401     *dmParallel = dmOverlap;
1402     if (flg) {
1403       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1404       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1405     }
1406 
1407     /* Re-map the pointSF to establish the full migration pattern */
1408     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1409     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1410     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1411     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1412     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1413     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1414     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1415     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1416     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1417     pointSF = overlapPointSF;
1418     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1419   }
1420   /* Cleanup Partition */
1421   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1422   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1423   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1424   ierr = ISDestroy(&part);CHKERRQ(ierr);
1425   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1426   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1427   /* Copy BC */
1428   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1429   /* Cleanup */
1430   if (sf) {*sf = pointSF;}
1431   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1432   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1433   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1434   PetscFunctionReturn(0);
1435 }
1436 
1437 #undef __FUNCT__
1438 #define __FUNCT__ "DMPlexDistributeOverlap"
1439 /*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on DM
1443 
  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
. overlap - The overlap of partitions, 0 is the default
- renumbering - The point renumbering (local-to-global mapping) produced by the original distribution
1447 
  Output Parameters:
1449 + sf - The PetscSF used for point distribution
1450 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1451 
1452   Note: If the mesh was not distributed, the return value is NULL.
1453 
1454   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1455   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1456   representation on the mesh.
1457 
1458   Level: intermediate
1459 
1460 .keywords: mesh, elements
1461 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1462 @*/
1463 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1464 {
1465   MPI_Comm               comm;
1466   PetscMPIInt            rank;
1467   PetscSection           rootSection, leafSection;
1468   IS                     rootrank, leafrank;
1469   PetscSection           coneSection;
1470   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1471   PetscSFNode           *ghostRemote;
1472   const PetscSFNode     *overlapRemote;
1473   ISLocalToGlobalMapping overlapRenumbering;
1474   const PetscInt        *renumberingArray, *overlapLocal;
1475   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1476   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1477   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1478   PetscErrorCode         ierr;
1479 
1480   PetscFunctionBegin;
1481   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1482   if (sf) PetscValidPointer(sf, 3);
1483   PetscValidPointer(dmOverlap, 4);
1484 
1485   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1486   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1487   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1488 
1489   /* Compute point overlap with neighbouring processes on the distributed DM */
1490   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1491   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1492   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1493   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1494   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1495   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1496   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1497   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1498   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1499   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1500 
1501   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1502   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1503 
1504   /* Convert cones to global numbering before migrating them */
1505   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1506   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1507   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1508   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1509 
1510   /* Derive the new local-to-global mapping from the old one */
1511   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1512   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1513   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1514   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1515   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1516   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1517 
1518   /* Build the overlapping DM */
1519   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1520   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1521   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1522   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1523   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1524   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1525   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1526 
1527   /* Build the new point SF by propagating the depthShift generate remote root indices */
1528   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1529   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1530   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1531   numGhostPoints = numSharedPoints + numOverlapPoints;
1532   ierr = PetscMalloc2(numGhostPoints, &ghostLocal, numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1533   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1534   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1535   for (p=0; p<overlapLeaves; p++) {
1536     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1537   }
1538   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1539   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1540   for (idx=0, p=0; p<overlapLeaves; p++) {
1541     if (overlapRemote[p].rank != rank) {
1542       ghostLocal[idx] = overlapLocal[p];
1543       ghostRemote[idx].index = recvPointIDs[p];
1544       ghostRemote[idx].rank = overlapRemote[p].rank;
1545       idx++;
1546     }
1547   }
1548   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1549   ierr = PetscSFCreate(comm, &newPointSF);;CHKERRQ(ierr);
1550   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1551   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1552   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1553   /* Cleanup overlap partition */
1554   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1555   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1556   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1557   if (sf) *sf = migrationSF;
1558   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1559   ierr = DMSetFromOptions(*dmOverlap);CHKERRQ(ierr);
1560   PetscFunctionReturn(0);
1561 }
1562