xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 38e7336fef8d310d693744ab39306ec09879f8c2)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
124 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
125 {
126   const PetscInt *cone = NULL;
127   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
128   PetscErrorCode  ierr;
129 
130   PetscFunctionBeginHot;
131   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
132   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
133   for (c = 0; c < coneSize; ++c) {
134     const PetscInt *support = NULL;
135     PetscInt        supportSize, s, q;
136 
137     ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
138     ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
139     for (s = 0; s < supportSize; ++s) {
140       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
141         if (support[s] == adj[q]) break;
142       }
143       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
144     }
145   }
146   *adjSize = numAdj;
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
152 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
153 {
154   const PetscInt *support = NULL;
155   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
156   PetscErrorCode  ierr;
157 
158   PetscFunctionBeginHot;
159   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
160   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
161   for (s = 0; s < supportSize; ++s) {
162     const PetscInt *cone = NULL;
163     PetscInt        coneSize, c, q;
164 
165     ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
166     ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
167     for (c = 0; c < coneSize; ++c) {
168       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
169         if (cone[c] == adj[q]) break;
170       }
171       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
172     }
173   }
174   *adjSize = numAdj;
175   PetscFunctionReturn(0);
176 }
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
180 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
181 {
182   PetscInt      *star = NULL;
183   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
184   PetscErrorCode ierr;
185 
186   PetscFunctionBeginHot;
187   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
188   for (s = 0; s < starSize*2; s += 2) {
189     const PetscInt *closure = NULL;
190     PetscInt        closureSize, c, q;
191 
192     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
193     for (c = 0; c < closureSize*2; c += 2) {
194       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
195         if (closure[c] == adj[q]) break;
196       }
197       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
198     }
199     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
200   }
201   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
202   *adjSize = numAdj;
203   PetscFunctionReturn(0);
204 }
205 
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
/* Dispatch to the adjacency computation selected by (useCone, useTransitiveClosure).
   If *adj is NULL, allocates a worst-case-sized array (caller must PetscFree it);
   if *adjSize is negative, it is replaced by the cached allocation size.
   NOTE(review): asiz is function-static, so it is not thread-safe, and a call that
   passes a pre-allocated *adj with *adjSize < 0 reuses the size cached by the most
   recent allocating call — possibly for a different DM. Confirm callers always pair
   the array with the DM it was allocated for. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;  /* size of the most recently allocated adjacency array */
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (!*adj) {
    PetscInt depth, maxConeSize, maxSupportSize;

    /* Worst-case bound: alternating cone/support traversals of depth+1 levels, plus one */
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr);
    asiz = PetscPowInt(maxConeSize, depth+1) * PetscPowInt(maxSupportSize, depth+1) + 1;
    ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "DMPlexGetAdjacency"
235 /*@
236   DMPlexGetAdjacency - Return all points adjacent to the given point
237 
238   Input Parameters:
239 + dm - The DM object
240 . p  - The point
241 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
242 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
243 
244   Output Parameters:
245 + adjSize - The number of adjacent points
246 - adj - The adjacent points
247 
248   Level: advanced
249 
250   Notes: The user must PetscFree the adj array if it was not passed in.
251 
252 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
253 @*/
254 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
255 {
256   DM_Plex       *mesh = (DM_Plex *) dm->data;
257   PetscErrorCode ierr;
258 
259   PetscFunctionBeginHot;
260   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
261   PetscValidPointer(adjSize,3);
262   PetscValidPointer(adj,4);
263   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, adjSize, adj);CHKERRQ(ierr);
264   PetscFunctionReturn(0);
265 }
266 #undef __FUNCT__
267 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
268 /*@
269   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
270 
271   Collective on DM
272 
273   Input Parameters:
274 + dm      - The DM
275 - sfPoint - The PetscSF which encodes point connectivity
276 
277   Output Parameters:
278 + processRanks - A list of process neighbors, or NULL
279 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
280 
281   Level: developer
282 
283 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
284 @*/
285 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
286 {
287   const PetscSFNode *remotePoints;
288   PetscInt          *localPointsNew;
289   PetscSFNode       *remotePointsNew;
290   const PetscInt    *nranks;
291   PetscInt          *ranksNew;
292   PetscBT            neighbors;
293   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
294   PetscMPIInt        numProcs, proc, rank;
295   PetscErrorCode     ierr;
296 
297   PetscFunctionBegin;
298   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
299   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
300   if (processRanks) {PetscValidPointer(processRanks, 3);}
301   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
302   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
303   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
304   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
305   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
306   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
307   /* Compute root-to-leaf process connectivity */
308   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
309   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
310   for (p = pStart; p < pEnd; ++p) {
311     PetscInt ndof, noff, n;
312 
313     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
314     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
315     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
316   }
317   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
318   /* Compute leaf-to-neighbor process connectivity */
319   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
320   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
321   for (p = pStart; p < pEnd; ++p) {
322     PetscInt ndof, noff, n;
323 
324     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
325     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
326     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
327   }
328   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
329   /* Compute leaf-to-root process connectivity */
330   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
331   /* Calculate edges */
332   PetscBTClear(neighbors, rank);
333   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
334   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
335   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
336   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
337   for(proc = 0, n = 0; proc < numProcs; ++proc) {
338     if (PetscBTLookup(neighbors, proc)) {
339       ranksNew[n]              = proc;
340       localPointsNew[n]        = proc;
341       remotePointsNew[n].index = rank;
342       remotePointsNew[n].rank  = proc;
343       ++n;
344     }
345   }
346   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
347   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
348   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
349   if (sfProcess) {
350     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
351     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
352     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
353     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
354   }
355   PetscFunctionReturn(0);
356 }
357 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

  Notes: rootrank and leafrank are created here; the caller owns and must destroy them.

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: the root degree of the point SF */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every leaf contributes its own rank */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* remoterank ownership transfers to the IS (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves, so each leaf knows every rank sharing its root */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
414 
415 #undef __FUNCT__
416 #define __FUNCT__ "DMPlexCreateOverlap"
417 /*@C
418   DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.
419 
420   Collective on DM
421 
422   Input Parameters:
423 + dm          - The DM
424 . rootSection - The number of leaves for a given root point
425 . rootrank    - The rank of each edge into the root point
426 . leafSection - The number of processes sharing a given leaf point
427 - leafrank    - The rank of each process sharing a leaf point
428 
429   Output Parameters:
430 + ovRootSection - The number of new overlap points for each neighboring process
431 . ovRootPoints  - The new overlap points for each neighboring process
432 . ovLeafSection - The number of new overlap points from each neighboring process
433 - ovLeafPoints  - The new overlap points from each neighboring process
434 
435   Level: developer
436 
437 .seealso: DMPlexDistributeOwnership()
438 @*/
439 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSection ovRootSection, PetscSFNode **ovRootPoints, PetscSection ovLeafSection, PetscSFNode **ovLeafPoints)
440 {
441   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
442   PetscSF            sfPoint, sfProc;
443   IS                 valueIS;
444   const PetscSFNode *remote;
445   const PetscInt    *local;
446   const PetscInt    *nrank, *rrank, *neighbors;
447   PetscInt          *adj = NULL;
448   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
449   PetscMPIInt        rank, numProcs;
450   PetscErrorCode     ierr;
451 
452   PetscFunctionBegin;
453   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
454   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
455   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
456   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
457   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
458   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
459   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
460   /* Handle leaves: shared with the root point */
461   for (l = 0; l < nleaves; ++l) {
462     PetscInt adjSize = PETSC_DETERMINE, a;
463 
464     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
465     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
466   }
467   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
468   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
469   /* Handle roots */
470   for (p = pStart; p < pEnd; ++p) {
471     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
472 
473     if ((p >= sStart) && (p < sEnd)) {
474       /* Some leaves share a root with other leaves on different processes */
475       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
476       if (neighbors) {
477         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
478         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
479         for (n = 0; n < neighbors; ++n) {
480           const PetscInt remoteRank = nrank[noff+n];
481 
482           if (remoteRank == rank) continue;
483           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
484         }
485       }
486     }
487     /* Roots are shared with leaves */
488     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
489     if (!neighbors) continue;
490     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
491     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
492     for (n = 0; n < neighbors; ++n) {
493       const PetscInt remoteRank = rrank[noff+n];
494 
495       if (remoteRank == rank) continue;
496       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
497     }
498   }
499   ierr = PetscFree(adj);CHKERRQ(ierr);
500   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
501   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
502   {
503     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
504     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
505   }
506   /* Convert to (point, rank) and use actual owners */
507   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
508   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
509   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
510   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
511   for (n = 0; n < numNeighbors; ++n) {
512     PetscInt numPoints;
513 
514     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
515     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
516   }
517   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
518   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
519   ierr = PetscMalloc1(ovSize, ovRootPoints);CHKERRQ(ierr);
520   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
521   for (n = 0; n < numNeighbors; ++n) {
522     IS              pointIS;
523     const PetscInt *points;
524     PetscInt        off, numPoints, p;
525 
526     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
527     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
528     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
529     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
530     for (p = 0; p < numPoints; ++p) {
531       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
532       if (l >= 0) {(*ovRootPoints)[off+p] = remote[l];}
533       else        {(*ovRootPoints)[off+p].index = points[p]; (*ovRootPoints)[off+p].rank = rank;}
534     }
535     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
536     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
537   }
538   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
539   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
540   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
541   /* Make process SF */
542   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
543   {
544     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
545   }
546   /* Communicate overlap */
547   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, (void *) *ovRootPoints, ovLeafSection, (void **) ovLeafPoints);CHKERRQ(ierr);
548   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
549   PetscFunctionReturn(0);
550 }
551 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Build the distributed layout and the per-point remote offsets used to construct the data SF.
     NOTE(review): remoteOffsets does not appear to be freed here — confirm whether
     PetscSFCreateSectionSF takes ownership at this revision, otherwise this leaks. */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Size the output vector to the new local layout */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  /* Broadcast dof values from the original layout into the new one */
  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "DMPlexDistributeFieldIS"
602 /*@
603   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
604 
605   Collective on DM
606 
607   Input Parameters:
608 + dm - The DMPlex object
609 . pointSF - The PetscSF describing the communication pattern
610 . originalSection - The PetscSection for existing data layout
611 - originalIS - The existing data
612 
613   Output Parameters:
614 + newSection - The PetscSF describing the new data layout
615 - newIS - The new data
616 
617   Level: developer
618 
619 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
620 @*/
621 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
622 {
623   PetscSF         fieldSF;
624   PetscInt       *newValues, *remoteOffsets, fieldSize;
625   const PetscInt *originalValues;
626   PetscErrorCode  ierr;
627 
628   PetscFunctionBegin;
629   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
630   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
631 
632   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
633   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
634 
635   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
636   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
637   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
638   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
639   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
640   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
641   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
642   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
643   PetscFunctionReturn(0);
644 }
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "DMPlexDistributeData"
648 /*@
649   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
650 
651   Collective on DM
652 
653   Input Parameters:
654 + dm - The DMPlex object
655 . pointSF - The PetscSF describing the communication pattern
656 . originalSection - The PetscSection for existing data layout
657 . datatype - The type of data
658 - originalData - The existing data
659 
660   Output Parameters:
661 + newSection - The PetscSection describing the new data layout
662 - newData - The new data
663 
664   Level: developer
665 
666 .seealso: DMPlexDistribute(), DMPlexDistributeField()
667 @*/
668 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
669 {
670   PetscSF        fieldSF;
671   PetscInt      *remoteOffsets, fieldSize;
672   PetscMPIInt    dataSize;
673   PetscErrorCode ierr;
674 
675   PetscFunctionBegin;
676   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
677   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
678 
679   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
680   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
681   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
682 
683   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
684   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
685   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
686   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
687   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
688   PetscFunctionReturn(0);
689 }
690 
691 #undef __FUNCT__
692 #define __FUNCT__ "DMPlexDistribute"
693 /*@C
694   DMPlexDistribute - Distributes the mesh and any associated sections.
695 
696   Not Collective
697 
698   Input Parameter:
699 + dm  - The original DMPlex object
700 . partitioner - The partitioning package, or NULL for the default
701 - overlap - The overlap of partitions, 0 is the default
702 
703   Output Parameter:
704 + sf - The PetscSF used for point distribution
705 - parallelMesh - The distributed DMPlex object, or NULL
706 
  Note: If the mesh was not distributed, dmParallel is set to NULL.
708 
  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
712 
713   Level: intermediate
714 
715 .keywords: mesh, elements
716 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
717 @*/
718 PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
719 {
720   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
721   MPI_Comm               comm;
722   const PetscInt         height = 0;
723   PetscInt               dim, numRemoteRanks;
724   IS                     origCellPart,        origPart,        cellPart,        part;
725   PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
726   PetscSFNode           *remoteRanks;
727   PetscSF                partSF, pointSF, coneSF;
728   ISLocalToGlobalMapping renumbering;
729   PetscSection           originalConeSection, newConeSection;
730   PetscInt              *remoteOffsets;
731   PetscInt              *cones, *newCones, newConesSize;
732   PetscBool              flg;
733   PetscMPIInt            rank, numProcs, p;
734   PetscErrorCode         ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
738   if (sf) PetscValidPointer(sf,4);
739   PetscValidPointer(dmParallel,5);
740 
741   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
742   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
743   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
744   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
745 
746   *dmParallel = NULL;
747   if (numProcs == 1) PetscFunctionReturn(0);
748 
749   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
750   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
751   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
752   if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
753   ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
754   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
755   if (!rank) numRemoteRanks = numProcs;
756   else       numRemoteRanks = 0;
757   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
758   for (p = 0; p < numRemoteRanks; ++p) {
759     remoteRanks[p].rank  = p;
760     remoteRanks[p].index = 0;
761   }
762   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
763   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
764   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
765   if (flg) {
766     ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
767     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
768     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
769     if (origCellPart) {
770       ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
771       ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
772       ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
773     }
774     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
775   }
776   /* Close the partition over the mesh */
777   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
778   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
779   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
780   /* Create new mesh */
781   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
782   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
783   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
784   pmesh = (DM_Plex*) (*dmParallel)->data;
785   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
786   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
787   if (flg) {
788     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
789     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
790     ierr = ISView(part, NULL);CHKERRQ(ierr);
791     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
792     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
793     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
794   }
795   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
796   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
797   /* Distribute cone section */
798   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
799   ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
800   ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
801   ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
802   {
803     PetscInt pStart, pEnd, p;
804 
805     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
806     for (p = pStart; p < pEnd; ++p) {
807       PetscInt coneSize;
808       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
809       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
810     }
811   }
812   /* Communicate and renumber cones */
813   ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
814   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
815   ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
816   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
817   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
818   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
819   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
820   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
821   if (flg) {
822     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
823     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
824     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
825     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
826     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
827   }
828   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
829   ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
830   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
831   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
832   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
833   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
834   /* Create supports and stratify sieve */
835   {
836     PetscInt pStart, pEnd;
837 
838     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
839     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
840   }
841   ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
842   ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
843   /* Create point SF for parallel mesh */
844   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
845   {
846     const PetscInt *leaves;
847     PetscSFNode    *remotePoints, *rowners, *lowners;
848     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
849     PetscInt        pStart, pEnd;
850 
851     ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
852     ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
853     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
854     for (p=0; p<numRoots; p++) {
855       rowners[p].rank  = -1;
856       rowners[p].index = -1;
857     }
858     if (origCellPart) {
859       /* Make sure points in the original partition are not assigned to other procs */
860       const PetscInt *origPoints;
861 
862       ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
863       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
864       for (p = 0; p < numProcs; ++p) {
865         PetscInt dof, off, d;
866 
867         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
868         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
869         for (d = off; d < off+dof; ++d) {
870           rowners[origPoints[d]].rank = p;
871         }
872       }
873       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
874       ierr = ISDestroy(&origPart);CHKERRQ(ierr);
875       ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
876     }
877     ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
878     ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);
879 
880     ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
881     ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
882     for (p = 0; p < numLeaves; ++p) {
883       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
884         lowners[p].rank  = rank;
885         lowners[p].index = leaves ? leaves[p] : p;
886       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
887         lowners[p].rank  = -2;
888         lowners[p].index = -2;
889       }
890     }
891     for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */
892       rowners[p].rank  = -3;
893       rowners[p].index = -3;
894     }
895     ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
896     ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
897     ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
898     ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
899     for (p = 0; p < numLeaves; ++p) {
900       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
901       if (lowners[p].rank != rank) ++numGhostPoints;
902     }
903     ierr = PetscMalloc1(numGhostPoints,    &ghostPoints);CHKERRQ(ierr);
904     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
905     for (p = 0, gp = 0; p < numLeaves; ++p) {
906       if (lowners[p].rank != rank) {
907         ghostPoints[gp]        = leaves ? leaves[p] : p;
908         remotePoints[gp].rank  = lowners[p].rank;
909         remotePoints[gp].index = lowners[p].index;
910         ++gp;
911       }
912     }
913     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
914     ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
915     ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
916   }
917   pmesh->useCone    = mesh->useCone;
918   pmesh->useClosure = mesh->useClosure;
919   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
920   /* Distribute Coordinates */
921   {
922     PetscSection     originalCoordSection, newCoordSection;
923     Vec              originalCoordinates, newCoordinates;
924     PetscInt         bs;
925     const char      *name;
926     const PetscReal *maxCell, *L;
927 
928     ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
929     ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
930     ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
931     if (originalCoordinates) {
932       ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
933       ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
934       ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
935 
936       ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
937       ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
938       ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
939       ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
940       ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
941     }
942     ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
943     if (L) {ierr = DMSetPeriodicity(*dmParallel, maxCell, L);CHKERRQ(ierr);}
944   }
945   /* Distribute labels */
946   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
947   {
948     DMLabel  next      = mesh->labels, newNext = pmesh->labels;
949     PetscInt numLabels = 0, l;
950 
951     /* Bcast number of labels */
952     while (next) {++numLabels; next = next->next;}
953     ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
954     next = mesh->labels;
955     for (l = 0; l < numLabels; ++l) {
956       DMLabel   labelNew;
957       PetscBool isdepth;
958 
959       /* Skip "depth" because it is recreated */
960       if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
961       ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
962       if (isdepth) {if (!rank) next = next->next; continue;}
963       ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
964       /* Insert into list */
965       if (newNext) newNext->next = labelNew;
966       else         pmesh->labels = labelNew;
967       newNext = labelNew;
968       if (!rank) next = next->next;
969     }
970   }
971   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
972   /* Setup hybrid structure */
973   {
974     const PetscInt *gpoints;
975     PetscInt        depth, n, d;
976 
977     for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
978     ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
979     ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
980     ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
981     ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
982     for (d = 0; d <= dim; ++d) {
983       PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
984 
985       if (pmax < 0) continue;
986       ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
987       ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
988       ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
989       for (p = 0; p < n; ++p) {
990         const PetscInt point = gpoints[p];
991 
992         if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
993       }
994       if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
995       else            pmesh->hybridPointMax[d] = -1;
996     }
997     ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
998   }
999   /* Cleanup Partition */
1000   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1001   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1002   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1003   ierr = ISDestroy(&part);CHKERRQ(ierr);
1004   /* Copy BC */
1005   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1006   /* Cleanup */
1007   if (sf) {*sf = pointSF;}
1008   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1009   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1010   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1011   PetscFunctionReturn(0);
1012 }
1013