xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision efd4aadf157bf1ba2d80c2be092fcf4247860003)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@
5   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
6 
7   Input Parameters:
8 + dm      - The DM object
9 - useCone - Flag to use the cone first
10 
11   Level: intermediate
12 
13   Notes:
14 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
15 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
16 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
17 
18 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
19 @*/
20 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
21 {
22   DM_Plex *mesh = (DM_Plex *) dm->data;
23 
24   PetscFunctionBegin;
25   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
26   mesh->useCone = useCone;
27   PetscFunctionReturn(0);
28 }
29 
30 /*@
31   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
32 
33   Input Parameter:
34 . dm      - The DM object
35 
36   Output Parameter:
37 . useCone - Flag to use the cone first
38 
39   Level: intermediate
40 
41   Notes:
42 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
43 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
44 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
45 
46 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
47 @*/
48 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
49 {
50   DM_Plex *mesh = (DM_Plex *) dm->data;
51 
52   PetscFunctionBegin;
53   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
54   PetscValidIntPointer(useCone, 2);
55   *useCone = mesh->useCone;
56   PetscFunctionReturn(0);
57 }
58 
59 /*@
60   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
61 
62   Input Parameters:
63 + dm      - The DM object
64 - useClosure - Flag to use the closure
65 
66   Level: intermediate
67 
68   Notes:
69 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
70 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
71 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
72 
73 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
74 @*/
75 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
76 {
77   DM_Plex *mesh = (DM_Plex *) dm->data;
78 
79   PetscFunctionBegin;
80   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
81   mesh->useClosure = useClosure;
82   PetscFunctionReturn(0);
83 }
84 
85 /*@
86   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
87 
88   Input Parameter:
89 . dm      - The DM object
90 
91   Output Parameter:
92 . useClosure - Flag to use the closure
93 
94   Level: intermediate
95 
96   Notes:
97 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
98 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
99 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
100 
101 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
102 @*/
103 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
104 {
105   DM_Plex *mesh = (DM_Plex *) dm->data;
106 
107   PetscFunctionBegin;
108   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
109   PetscValidIntPointer(useClosure, 2);
110   *useClosure = mesh->useClosure;
111   PetscFunctionReturn(0);
112 }
113 
114 /*@
115   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
116 
117   Input Parameters:
118 + dm      - The DM object
119 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
120 
121   Level: intermediate
122 
123 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
124 @*/
125 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
126 {
127   DM_Plex *mesh = (DM_Plex *) dm->data;
128 
129   PetscFunctionBegin;
130   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
131   mesh->useAnchors = useAnchors;
132   PetscFunctionReturn(0);
133 }
134 
135 /*@
136   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
137 
138   Input Parameter:
139 . dm      - The DM object
140 
141   Output Parameter:
142 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
143 
144   Level: intermediate
145 
146 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
147 @*/
148 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
149 {
150   DM_Plex *mesh = (DM_Plex *) dm->data;
151 
152   PetscFunctionBegin;
153   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
154   PetscValidIntPointer(useAnchors, 2);
155   *useAnchors = mesh->useAnchors;
156   PetscFunctionReturn(0);
157 }
158 
159 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
160 {
161   const PetscInt *cone = NULL;
162   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
163   PetscErrorCode  ierr;
164 
165   PetscFunctionBeginHot;
166   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
167   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
168   for (c = 0; c <= coneSize; ++c) {
169     const PetscInt  point   = !c ? p : cone[c-1];
170     const PetscInt *support = NULL;
171     PetscInt        supportSize, s, q;
172 
173     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
174     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
175     for (s = 0; s < supportSize; ++s) {
176       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
177         if (support[s] == adj[q]) break;
178       }
179       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
180     }
181   }
182   *adjSize = numAdj;
183   PetscFunctionReturn(0);
184 }
185 
186 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
187 {
188   const PetscInt *support = NULL;
189   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
190   PetscErrorCode  ierr;
191 
192   PetscFunctionBeginHot;
193   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
194   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
195   for (s = 0; s <= supportSize; ++s) {
196     const PetscInt  point = !s ? p : support[s-1];
197     const PetscInt *cone  = NULL;
198     PetscInt        coneSize, c, q;
199 
200     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
201     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
202     for (c = 0; c < coneSize; ++c) {
203       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
204         if (cone[c] == adj[q]) break;
205       }
206       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
207     }
208   }
209   *adjSize = numAdj;
210   PetscFunctionReturn(0);
211 }
212 
213 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
214 {
215   PetscInt      *star = NULL;
216   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
217   PetscErrorCode ierr;
218 
219   PetscFunctionBeginHot;
220   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
221   for (s = 0; s < starSize*2; s += 2) {
222     const PetscInt *closure = NULL;
223     PetscInt        closureSize, c, q;
224 
225     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
226     for (c = 0; c < closureSize*2; c += 2) {
227       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
228         if (closure[c] == adj[q]) break;
229       }
230       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
231     }
232     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
233   }
234   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
235   *adjSize = numAdj;
236   PetscFunctionReturn(0);
237 }
238 
/* Dispatch adjacency computation for point p to the cone/support/transitive variant
   selected by the flags, optionally allocating the adjacency buffer and optionally
   replacing constrained (anchored) points by their anchor points.
   On exit *adjSize is the number of adjacent points and *adj holds them (caller frees
   *adj if it was allocated here). */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  /* NOTE(review): asiz is static, so if a caller passes a preallocated *adj together
     with *adjSize < 0, the size used is whatever a previous call computed (or 0) --
     confirm callers always pair *adjSize < 0 with *adj == NULL */
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      /* Each constrained point can expand into up to maxAnchors anchor points */
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    /* Allocate a worst-case buffer: geometric-series bound on closure/star sizes from
       the maximum cone and support widths, capped by the total number of points */
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Select the adjacency pattern: star(closure(p)) / closure(star(p)) when using the
     transitive closure, otherwise support-of-cone (FVM) or cone-of-support (FEM-like) */
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace each constrained point in the list by its anchors: remove the point by
       shifting the tail left, then append any anchors not already present */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove orig[i]; i is NOT advanced, so the shifted-in element is examined next */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          /* Comma-operator idiom: when the scan finds no duplicate (q reaches numAdj),
             the loop condition itself appends the anchor and terminates the scan */
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
321 
322 /*@
323   DMPlexGetAdjacency - Return all points adjacent to the given point
324 
325   Input Parameters:
326 + dm - The DM object
327 . p  - The point
328 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
329 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
330 
331   Output Parameters:
332 + adjSize - The number of adjacent points
333 - adj - The adjacent points
334 
335   Level: advanced
336 
337   Notes: The user must PetscFree the adj array if it was not passed in.
338 
339 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
340 @*/
341 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
342 {
343   DM_Plex       *mesh = (DM_Plex *) dm->data;
344   PetscErrorCode ierr;
345 
346   PetscFunctionBeginHot;
347   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
348   PetscValidPointer(adjSize,3);
349   PetscValidPointer(adj,4);
350   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
351   PetscFunctionReturn(0);
352 }
353 
354 /*@
355   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
356 
357   Collective on DM
358 
359   Input Parameters:
360 + dm      - The DM
361 - sfPoint - The PetscSF which encodes point connectivity
362 
363   Output Parameters:
364 + processRanks - A list of process neighbors, or NULL
365 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
366 
367   Level: developer
368 
369 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
370 @*/
371 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
372 {
373   const PetscSFNode *remotePoints;
374   PetscInt          *localPointsNew;
375   PetscSFNode       *remotePointsNew;
376   const PetscInt    *nranks;
377   PetscInt          *ranksNew;
378   PetscBT            neighbors;
379   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
380   PetscMPIInt        size, proc, rank;
381   PetscErrorCode     ierr;
382 
383   PetscFunctionBegin;
384   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
385   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
386   if (processRanks) {PetscValidPointer(processRanks, 3);}
387   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
388   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
389   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
390   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
391   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
392   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
393   /* Compute root-to-leaf process connectivity */
394   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
395   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
396   for (p = pStart; p < pEnd; ++p) {
397     PetscInt ndof, noff, n;
398 
399     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
400     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
401     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
402   }
403   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
404   /* Compute leaf-to-neighbor process connectivity */
405   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
406   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
407   for (p = pStart; p < pEnd; ++p) {
408     PetscInt ndof, noff, n;
409 
410     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
411     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
412     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
413   }
414   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
415   /* Compute leaf-to-root process connectivity */
416   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
417   /* Calculate edges */
418   PetscBTClear(neighbors, rank);
419   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
420   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
421   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
422   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
423   for(proc = 0, n = 0; proc < size; ++proc) {
424     if (PetscBTLookup(neighbors, proc)) {
425       ranksNew[n]              = proc;
426       localPointsNew[n]        = proc;
427       remotePointsNew[n].index = rank;
428       remotePointsNew[n].rank  = proc;
429       ++n;
430     }
431   }
432   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
433   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
434   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
435   if (sfProcess) {
436     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
437     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
438     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
439     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
440   }
441   PetscFunctionReturn(0);
442 }
443 
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: rootSection dof = SF root degree */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every process contributes its own rank at each
     of its leaves; the gather packs them per root point into remoterank */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* rootrank takes ownership of remoterank (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks back to leaves, producing leafSection/leafrank */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
498 
/*@C
  DMPlexCreateOverlap - Compute the label describing the overlap: for each point, the remote ranks which should receive a copy of it

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point; donate the adjacency of each local
     leaf to the rank owning the corresponding root */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    /* adj is reused across iterations; allocated by the first call */
    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves: donate adjacency of each shared root to every
       rank holding one of its leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF: every process is a leaf of every other */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  /* Turn "points I donate to rank r" into "points rank r donates to me" */
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
627 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets:
     label each local point with its depth and broadcast over the overlap SF so
     we learn the depth of every incoming ghost point */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* NOTE(review): the shift accumulation below assumes the interleaved Plex stratum
     ordering (cells, vertices, faces, edges); confirm against DMPlexStratify before
     modifying */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    /* depthIdx[d]: next free slot for an incoming point of depth d */
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves, shifted to their new stratum positions */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, overwriting the self-mapping
     set above so shared points keep pointing at their true owners */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the end of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER: the SF takes ownership of ilocal and iremote */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
738 
739 /*@
740   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
741 
742   Input Parameters:
743 + dm          - The DM
744 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
745 
746   Output Parameter:
747 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
748 
749   Level: developer
750 
751 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
752 @*/
753 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
754 {
755   MPI_Comm           comm;
756   PetscMPIInt        rank, size;
757   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
758   PetscInt          *pointDepths, *remoteDepths, *ilocal;
759   PetscInt          *depthRecv, *depthShift, *depthIdx;
760   PetscInt           hybEnd[4];
761   const PetscSFNode *iremote;
762   PetscErrorCode     ierr;
763 
764   PetscFunctionBegin;
765   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
766   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
767   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
768   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
769   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
770   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
771   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
772 
773   /* Before building the migration SF we need to know the new stratum offsets */
774   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
775   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
776   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
777   for (d = 0; d < depth+1; ++d) {
778     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
779     for (p = pStart; p < pEnd; ++p) {
780       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
781         pointDepths[p] = 2 * d;
782       } else {
783         pointDepths[p] = 2 * d + 1;
784       }
785     }
786   }
787   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
788   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
789   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
790   /* Count recevied points in each stratum and compute the internal strata shift */
791   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
792   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
793   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
794   depthShift[2*depth+1] = 0;
795   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
796   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
797   depthShift[0] += depthRecv[1];
798   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
799   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
800   for (d = 2 * depth-1; d > 2; --d) {
801     PetscInt e;
802 
803     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
804   }
805   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
806   /* Derive a new local permutation based on stratified indices */
807   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
808   for (p = 0; p < nleaves; ++p) {
809     const PetscInt dep = remoteDepths[p];
810 
811     ilocal[p] = depthShift[dep] + depthIdx[dep];
812     depthIdx[dep]++;
813   }
814   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
815   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
816   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
817   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
818   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
819   PetscFunctionReturn(0);
820 }
821 
822 /*@
823   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
824 
825   Collective on DM
826 
827   Input Parameters:
828 + dm - The DMPlex object
829 . pointSF - The PetscSF describing the communication pattern
830 . originalSection - The PetscSection for existing data layout
831 - originalVec - The existing data
832 
833   Output Parameters:
834 + newSection - The PetscSF describing the new data layout
835 - newVec - The new data
836 
837   Level: developer
838 
839 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
840 @*/
841 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
842 {
843   PetscSF        fieldSF;
844   PetscInt      *remoteOffsets, fieldSize;
845   PetscScalar   *originalValues, *newValues;
846   PetscErrorCode ierr;
847 
848   PetscFunctionBegin;
849   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
850   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
851 
852   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
853   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
854   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
855 
856   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
857   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
858   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
859   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
860   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
861   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
862   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
863   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
864   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
865   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
866   PetscFunctionReturn(0);
867 }
868 
869 /*@
870   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
871 
872   Collective on DM
873 
874   Input Parameters:
875 + dm - The DMPlex object
876 . pointSF - The PetscSF describing the communication pattern
877 . originalSection - The PetscSection for existing data layout
878 - originalIS - The existing data
879 
880   Output Parameters:
881 + newSection - The PetscSF describing the new data layout
882 - newIS - The new data
883 
884   Level: developer
885 
886 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
887 @*/
888 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
889 {
890   PetscSF         fieldSF;
891   PetscInt       *newValues, *remoteOffsets, fieldSize;
892   const PetscInt *originalValues;
893   PetscErrorCode  ierr;
894 
895   PetscFunctionBegin;
896   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
897   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
898 
899   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
900   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
901 
902   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
903   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
904   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
905   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
906   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
907   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
908   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
909   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
910   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
911   PetscFunctionReturn(0);
912 }
913 
914 /*@
915   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
916 
917   Collective on DM
918 
919   Input Parameters:
920 + dm - The DMPlex object
921 . pointSF - The PetscSF describing the communication pattern
922 . originalSection - The PetscSection for existing data layout
923 . datatype - The type of data
924 - originalData - The existing data
925 
926   Output Parameters:
927 + newSection - The PetscSection describing the new data layout
928 - newData - The new data
929 
930   Level: developer
931 
932 .seealso: DMPlexDistribute(), DMPlexDistributeField()
933 @*/
934 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
935 {
936   PetscSF        fieldSF;
937   PetscInt      *remoteOffsets, fieldSize;
938   PetscMPIInt    dataSize;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
943   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
944 
945   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
946   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
947   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
948 
949   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
950   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
951   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
952   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
953   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
954   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
955   PetscFunctionReturn(0);
956 }
957 
958 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
959 {
960   DM_Plex               *mesh  = (DM_Plex*) dm->data;
961   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
962   MPI_Comm               comm;
963   PetscSF                coneSF;
964   PetscSection           originalConeSection, newConeSection;
965   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
966   PetscBool              flg;
967   PetscErrorCode         ierr;
968 
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
971   PetscValidPointer(dmParallel,4);
972   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
973 
974   /* Distribute cone section */
975   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
976   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
977   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
978   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
979   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
980   {
981     PetscInt pStart, pEnd, p;
982 
983     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
984     for (p = pStart; p < pEnd; ++p) {
985       PetscInt coneSize;
986       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
987       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
988     }
989   }
990   /* Communicate and renumber cones */
991   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
992   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
993   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
994   if (original) {
995     PetscInt numCones;
996 
997     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
998     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
999   }
1000   else {
1001     globCones = cones;
1002   }
1003   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1004   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1005   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1006   if (original) {
1007     ierr = PetscFree(globCones);CHKERRQ(ierr);
1008   }
1009   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1010   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1011 #if PETSC_USE_DEBUG
1012   {
1013     PetscInt  p;
1014     PetscBool valid = PETSC_TRUE;
1015     for (p = 0; p < newConesSize; ++p) {
1016       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1017     }
1018     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1019   }
1020 #endif
1021   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1022   if (flg) {
1023     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1024     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1025     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1026     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1027     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1028   }
1029   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1030   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1031   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1032   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1033   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1034   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1035   /* Create supports and stratify sieve */
1036   {
1037     PetscInt pStart, pEnd;
1038 
1039     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1040     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1041   }
1042   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1043   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1044   pmesh->useCone    = mesh->useCone;
1045   pmesh->useClosure = mesh->useClosure;
1046   PetscFunctionReturn(0);
1047 }
1048 
/* Migrate the local coordinate section and vector from dm to dmParallel over
   the migration SF, and copy the periodicity description when one is set. */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  /* Skip coordinate migration entirely if this DM has no local coordinates */
  if (originalCoordinates) {
    /* Local coordinates are stored on a sequential vector on each process */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    /* Preserve the name of the original coordinate vector */
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Carry the block size (spatial dimension of each point) over as well */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    /* DMSetCoordinatesLocal holds its own reference; drop ours */
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Replicate periodicity; L is used as the "periodicity is set" indicator */
  ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1083 
1084 /* Here we are assuming that process 0 always has everything */
1085 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1086 {
1087   DM_Plex         *mesh = (DM_Plex*) dm->data;
1088   MPI_Comm         comm;
1089   DMLabel          depthLabel;
1090   PetscMPIInt      rank;
1091   PetscInt         depth, d, numLabels, numLocalLabels, l;
1092   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1093   PetscObjectState depthState = -1;
1094   PetscErrorCode   ierr;
1095 
1096   PetscFunctionBegin;
1097   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1098   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1099   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1100   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1101   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1102 
1103   /* If the user has changed the depth label, communicate it instead */
1104   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1105   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1106   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1107   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1108   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1109   if (sendDepth) {
1110     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1111     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1112   }
1113   /* Everyone must have either the same number of labels, or none */
1114   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1115   numLabels = numLocalLabels;
1116   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1117   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1118   for (l = numLabels-1; l >= 0; --l) {
1119     DMLabel     label = NULL, labelNew = NULL;
1120     PetscBool   isdepth;
1121 
1122     if (hasLabels) {
1123       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1124       /* Skip "depth" because it is recreated */
1125       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1126     }
1127     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1128     if (isdepth && !sendDepth) continue;
1129     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1130     if (isdepth) {
1131       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1132       PetscInt gdepth;
1133 
1134       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1135       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1136       for (d = 0; d <= gdepth; ++d) {
1137         PetscBool has;
1138 
1139         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1140         if (!has) ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);
1141       }
1142     }
1143     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1144   }
1145   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1146   PetscFunctionReturn(0);
1147 }
1148 
/* Propagate the hybrid point bounds (mesh->hybridPointMax) from dm to
   dmParallel: flag each hybrid point on the serial chart, broadcast the flags
   over the migration SF, and take the first flagged point of each stratum as
   the new bound.
   NOTE(review): the renumbering argument is currently unused here. */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel; /* per-point hybrid flags on the serial/parallel charts */
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  /* Flag every local point at or beyond its stratum's hybrid bound */
  for (d = 0; d <= depth; d++) {
    /* For depth == 1 meshes the d == 1 bound lives at index dim — presumably
       uninterpolated meshes where cells sit at depth 1; confirm against callers */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  /* Ship the flags to the receiving processes */
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  /* The first flagged point of each parallel stratum becomes the new bound */
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1195 
1196 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1197 {
1198   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1199   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1200   MPI_Comm        comm;
1201   DM              refTree;
1202   PetscSection    origParentSection, newParentSection;
1203   PetscInt        *origParents, *origChildIDs;
1204   PetscBool       flg;
1205   PetscErrorCode  ierr;
1206 
1207   PetscFunctionBegin;
1208   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1209   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1210   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1211 
1212   /* Set up tree */
1213   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1214   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1215   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1216   if (origParentSection) {
1217     PetscInt        pStart, pEnd;
1218     PetscInt        *newParents, *newChildIDs, *globParents;
1219     PetscInt        *remoteOffsetsParents, newParentSize;
1220     PetscSF         parentSF;
1221 
1222     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1223     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1224     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1225     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1226     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1227     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1228     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1229     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1230     if (original) {
1231       PetscInt numParents;
1232 
1233       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1234       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1235       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1236     }
1237     else {
1238       globParents = origParents;
1239     }
1240     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1241     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1242     if (original) {
1243       ierr = PetscFree(globParents);CHKERRQ(ierr);
1244     }
1245     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1246     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1247     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1248 #if PETSC_USE_DEBUG
1249     {
1250       PetscInt  p;
1251       PetscBool valid = PETSC_TRUE;
1252       for (p = 0; p < newParentSize; ++p) {
1253         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1254       }
1255       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1256     }
1257 #endif
1258     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1259     if (flg) {
1260       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1261       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1262       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1263       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1264       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1265     }
1266     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1267     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1268     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1269     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1270   }
1271   pmesh->useAnchors = mesh->useAnchors;
1272   PetscFunctionReturn(0);
1273 }
1274 
1275 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1276 {
1277   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1278   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1279   PetscMPIInt            rank, size;
1280   MPI_Comm               comm;
1281   PetscErrorCode         ierr;
1282 
1283   PetscFunctionBegin;
1284   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1285   PetscValidPointer(dmParallel,7);
1286 
1287   /* Create point SF for parallel mesh */
1288   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1289   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1290   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1291   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1292   {
1293     const PetscInt *leaves;
1294     PetscSFNode    *remotePoints, *rowners, *lowners;
1295     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1296     PetscInt        pStart, pEnd;
1297 
1298     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1299     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1300     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1301     for (p=0; p<numRoots; p++) {
1302       rowners[p].rank  = -1;
1303       rowners[p].index = -1;
1304     }
1305     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1306     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1307     for (p = 0; p < numLeaves; ++p) {
1308       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1309         lowners[p].rank  = rank;
1310         lowners[p].index = leaves ? leaves[p] : p;
1311       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1312         lowners[p].rank  = -2;
1313         lowners[p].index = -2;
1314       }
1315     }
1316     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
1317       rowners[p].rank  = -3;
1318       rowners[p].index = -3;
1319     }
1320     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1321     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1322     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1323     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1324     for (p = 0; p < numLeaves; ++p) {
1325       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1326       if (lowners[p].rank != rank) ++numGhostPoints;
1327     }
1328     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1329     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1330     for (p = 0, gp = 0; p < numLeaves; ++p) {
1331       if (lowners[p].rank != rank) {
1332         ghostPoints[gp]        = leaves ? leaves[p] : p;
1333         remotePoints[gp].rank  = lowners[p].rank;
1334         remotePoints[gp].index = lowners[p].index;
1335         ++gp;
1336       }
1337     }
1338     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1339     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1340     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1341   }
1342   pmesh->useCone    = mesh->useCone;
1343   pmesh->useClosure = mesh->useClosure;
1344   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 /*@C
1349   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1350 
1351   Input Parameters:
1352 + dm          - The source DMPlex object
1353 . migrationSF - The star forest that describes the parallel point remapping
1354 - ownership   - Flag causing a vote to determine point ownership
1355 
1356   Output Parameter:
1357 . pointSF     - The star forest describing the point overlap in the remapped DM
1358 
1359   Level: developer
1360 
1361 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1362 @*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    /* MAXLOC over (rank, index) pairs: the highest bidding rank wins each root */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    /* No vote: the sending rank keeps ownership of all its roots */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Push the (owner rank, owner local index) of every root back out to its leaves */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves owned by another rank become the ghost points of the new point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  /* The migration leaves are the points of the remapped DM, hence nleaves roots here */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1424 
/*@C
  DMPlexMigrate  - Migrates internal DM data over the supplied star forest

  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object

  Level: intermediate

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
1439 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1440 {
1441   MPI_Comm               comm;
1442   PetscInt               dim, nroots;
1443   PetscSF                sfPoint;
1444   ISLocalToGlobalMapping ltogMigration;
1445   ISLocalToGlobalMapping ltogOriginal = NULL;
1446   PetscBool              flg;
1447   PetscErrorCode         ierr;
1448 
1449   PetscFunctionBegin;
1450   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1451   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1452   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1453   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1454   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1455 
1456   /* Check for a one-to-all distribution pattern */
1457   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1458   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1459   if (nroots >= 0) {
1460     IS                     isOriginal;
1461     PetscInt               n, size, nleaves;
1462     PetscInt              *numbering_orig, *numbering_new;
1463     /* Get the original point numbering */
1464     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1465     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1466     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1467     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1468     /* Convert to positive global numbers */
1469     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1470     /* Derive the new local-to-global mapping from the old one */
1471     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1472     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1473     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1474     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1475     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1476     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1477     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1478   } else {
1479     /* One-to-all distribution pattern: We can derive LToG from SF */
1480     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1481   }
1482   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1483   if (flg) {
1484     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1485     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1486   }
1487   /* Migrate DM data to target DM */
1488   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1489   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1490   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1491   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1492   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1493   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1494   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1495   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
1523 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1524 {
1525   MPI_Comm               comm;
1526   PetscPartitioner       partitioner;
1527   IS                     cellPart;
1528   PetscSection           cellPartSection;
1529   DM                     dmCoord;
1530   DMLabel                lblPartition, lblMigration;
1531   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1532   PetscBool              flg;
1533   PetscMPIInt            rank, size, p;
1534   PetscErrorCode         ierr;
1535 
1536   PetscFunctionBegin;
1537   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1538   if (sf) PetscValidPointer(sf,4);
1539   PetscValidPointer(dmParallel,5);
1540 
1541   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1542   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1543   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1544   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1545 
1546   *dmParallel = NULL;
1547   if (size == 1) {
1548     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1549     PetscFunctionReturn(0);
1550   }
1551 
1552   /* Create cell partition */
1553   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1554   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1555   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1556   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1557   {
1558     /* Convert partition to DMLabel */
1559     PetscInt proc, pStart, pEnd, npoints, poffset;
1560     const PetscInt *points;
1561     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1562     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1563     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1564     for (proc = pStart; proc < pEnd; proc++) {
1565       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1566       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1567       for (p = poffset; p < poffset+npoints; p++) {
1568         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1569       }
1570     }
1571     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1572   }
1573   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1574   {
1575     /* Build a global process SF */
1576     PetscSFNode *remoteProc;
1577     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1578     for (p = 0; p < size; ++p) {
1579       remoteProc[p].rank  = p;
1580       remoteProc[p].index = rank;
1581     }
1582     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1583     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1584     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1585   }
1586   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1587   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1588   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1589   /* Stratify the SF in case we are migrating an already parallel plex */
1590   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1591   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1592   sfMigration = sfStratified;
1593   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1594   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1595   if (flg) {
1596     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1597     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1598   }
1599 
1600   /* Create non-overlapping parallel DM and migrate internal data */
1601   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1602   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1603   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1604 
1605   /* Build the point SF without overlap */
1606   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1607   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1608   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1609   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1610   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1611 
1612   if (overlap > 0) {
1613     DM                 dmOverlap;
1614     PetscInt           nroots, nleaves;
1615     PetscSFNode       *newRemote;
1616     const PetscSFNode *oldRemote;
1617     PetscSF            sfOverlap, sfOverlapPoint;
1618     /* Add the partition overlap to the distributed DM */
1619     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1620     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1621     *dmParallel = dmOverlap;
1622     if (flg) {
1623       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1624       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1625     }
1626 
1627     /* Re-map the migration SF to establish the full migration pattern */
1628     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1629     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1630     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1631     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1632     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1633     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1634     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1635     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1636     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1637     sfMigration = sfOverlapPoint;
1638   }
1639   /* Cleanup Partition */
1640   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1641   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1642   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1643   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1644   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1645   /* Copy BC */
1646   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1647   /* Create sfNatural */
1648   if (dm->useNatural) {
1649     PetscSection section;
1650 
1651     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1652     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1653   }
1654   /* Cleanup */
1655   if (sf) {*sf = sfMigration;}
1656   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1657   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1658   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1659   PetscFunctionReturn(0);
1660 }
1661 
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on DM

  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm               comm;
  PetscMPIInt            size, rank;
  PetscSection           rootSection, leafSection;
  IS                     rootrank, leafrank;
  DM                     dmCoord;
  DMLabel                lblOverlap;
  PetscSF                sfOverlap, sfStratified, sfPoint;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* No overlap is possible (or needed) on a single process */
  if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);

  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert overlap label to stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  /* Ownership data is no longer needed once the overlap label exists */
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF; no ownership vote, since ownership is already established */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1745 
1746 /*@C
1747   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1748   root process of the original's communicator.
1749 
1750   Input Parameters:
1751 . dm - the original DMPlex object
1752 
1753   Output Parameters:
1754 . gatherMesh - the gathered DM object, or NULL
1755 
1756   Level: intermediate
1757 
1758 .keywords: mesh
1759 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1760 @*/
1761 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1762 {
1763   MPI_Comm       comm;
1764   PetscMPIInt    size;
1765   PetscPartitioner oldPart, gatherPart;
1766   PetscErrorCode ierr;
1767 
1768   PetscFunctionBegin;
1769   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1770   comm = PetscObjectComm((PetscObject)dm);
1771   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1772   *gatherMesh = NULL;
1773   if (size == 1) PetscFunctionReturn(0);
1774   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1775   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1776   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1777   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1778   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1779   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1780   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1781   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1782   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1783   PetscFunctionReturn(0);
1784 }
1785 
1786 /*@C
1787   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1788 
1789   Input Parameters:
1790 . dm - the original DMPlex object
1791 
1792   Output Parameters:
1793 . redundantMesh - the redundant DM object, or NULL
1794 
1795   Level: intermediate
1796 
1797 .keywords: mesh
1798 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1799 @*/
1800 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1801 {
1802   MPI_Comm       comm;
1803   PetscMPIInt    size, rank;
1804   PetscInt       pStart, pEnd, p;
1805   PetscInt       numPoints = -1;
1806   PetscSF        migrationSF, sfPoint;
1807   DM             gatherDM, dmCoord;
1808   PetscSFNode    *points;
1809   PetscErrorCode ierr;
1810 
1811   PetscFunctionBegin;
1812   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1813   comm = PetscObjectComm((PetscObject)dm);
1814   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1815   *redundantMesh = NULL;
1816   if (size == 1) {
1817     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1818     *redundantMesh = dm;
1819     PetscFunctionReturn(0);
1820   }
1821   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1822   if (!gatherDM) PetscFunctionReturn(0);
1823   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1824   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1825   numPoints = pEnd - pStart;
1826   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1827   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1828   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1829   for (p = 0; p < numPoints; p++) {
1830     points[p].index = p;
1831     points[p].rank  = 0;
1832   }
1833   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1834   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1835   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1836   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1837   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1838   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1839   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1840   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1841   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1842   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1843   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846