xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 60a1948e59517edd5702a5e277d78229b7015096)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
124 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
125 {
126   const PetscInt *cone = NULL;
127   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
128   PetscErrorCode  ierr;
129 
130   PetscFunctionBeginHot;
131   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
132   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
133   for (c = 0; c < coneSize; ++c) {
134     const PetscInt *support = NULL;
135     PetscInt        supportSize, s, q;
136 
137     ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
138     ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
139     for (s = 0; s < supportSize; ++s) {
140       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
141         if (support[s] == adj[q]) break;
142       }
143       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
144     }
145   }
146   *adjSize = numAdj;
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
152 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
153 {
154   const PetscInt *support = NULL;
155   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
156   PetscErrorCode  ierr;
157 
158   PetscFunctionBeginHot;
159   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
160   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
161   for (s = 0; s < supportSize; ++s) {
162     const PetscInt *cone = NULL;
163     PetscInt        coneSize, c, q;
164 
165     ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
166     ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
167     for (c = 0; c < coneSize; ++c) {
168       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
169         if (cone[c] == adj[q]) break;
170       }
171       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
172     }
173   }
174   *adjSize = numAdj;
175   PetscFunctionReturn(0);
176 }
177 
178 #undef __FUNCT__
179 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
180 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
181 {
182   PetscInt      *star = NULL;
183   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
184   PetscErrorCode ierr;
185 
186   PetscFunctionBeginHot;
187   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
188   for (s = 0; s < starSize*2; s += 2) {
189     const PetscInt *closure = NULL;
190     PetscInt        closureSize, c, q;
191 
192     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
193     for (c = 0; c < closureSize*2; c += 2) {
194       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
195         if (closure[c] == adj[q]) break;
196       }
197       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
198     }
199     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
200   }
201   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
202   *adjSize = numAdj;
203   PetscFunctionReturn(0);
204 }
205 
206 #undef __FUNCT__
207 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
208 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscInt *adjSize, PetscInt *adj[])
209 {
210   static PetscInt asiz = 0;
211   PetscErrorCode  ierr;
212 
213   PetscFunctionBeginHot;
214   if (!*adj) {
215     PetscInt depth, maxConeSize, maxSupportSize;
216 
217     ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
218     ierr = DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);CHKERRQ(ierr);
219     asiz = PetscPowInt(maxConeSize, depth+1) * PetscPowInt(maxSupportSize, depth+1) + 1;
220     ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
221   }
222   if (*adjSize < 0) *adjSize = asiz;
223   if (useTransitiveClosure) {
224     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
225   } else if (useCone) {
226     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227   } else {
228     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
229   }
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "DMPlexGetAdjacency"
235 /*@
236   DMPlexGetAdjacency - Return all points adjacent to the given point
237 
238   Input Parameters:
239 + dm - The DM object
240 . p  - The point
241 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
242 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
243 
244   Output Parameters:
245 + adjSize - The number of adjacent points
246 - adj - The adjacent points
247 
248   Level: advanced
249 
250   Notes: The user must PetscFree the adj array if it was not passed in.
251 
252 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
253 @*/
254 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
255 {
256   DM_Plex       *mesh = (DM_Plex *) dm->data;
257   PetscErrorCode ierr;
258 
259   PetscFunctionBeginHot;
260   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
261   PetscValidPointer(adjSize,3);
262   PetscValidPointer(adj,4);
263   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, adjSize, adj);CHKERRQ(ierr);
264   PetscFunctionReturn(0);
265 }
266 
267 #undef __FUNCT__
268 #define __FUNCT__ "DMPlexDistributeField"
269 /*@
270   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
271 
272   Collective on DM
273 
274   Input Parameters:
275 + dm - The DMPlex object
276 . pointSF - The PetscSF describing the communication pattern
277 . originalSection - The PetscSection for existing data layout
278 - originalVec - The existing data
279 
280   Output Parameters:
281 + newSection - The PetscSection describing the new data layout
282 - newVec - The new data
283 
284   Level: developer
285 
286 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
287 @*/
288 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
289 {
290   PetscSF        fieldSF;
291   PetscInt      *remoteOffsets, fieldSize;
292   PetscScalar   *originalValues, *newValues;
293   PetscErrorCode ierr;
294 
295   PetscFunctionBegin;
296   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
297   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
298 
299   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
300   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
301   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
302 
303   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
304   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
305   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
306   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
307   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
308   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
309   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
310   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
311   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
312   PetscFunctionReturn(0);
313 }
314 
315 #undef __FUNCT__
316 #define __FUNCT__ "DMPlexDistributeFieldIS"
317 /*@
318   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
319 
320   Collective on DM
321 
322   Input Parameters:
323 + dm - The DMPlex object
324 . pointSF - The PetscSF describing the communication pattern
325 . originalSection - The PetscSection for existing data layout
326 - originalIS - The existing data
327 
328   Output Parameters:
329 + newSection - The PetscSection describing the new data layout
330 - newIS - The new data
331 
332   Level: developer
333 
334 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
335 @*/
336 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
337 {
338   PetscSF         fieldSF;
339   PetscInt       *newValues, *remoteOffsets, fieldSize;
340   const PetscInt *originalValues;
341   PetscErrorCode  ierr;
342 
343   PetscFunctionBegin;
344   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
345   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
346 
347   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
348   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
349 
350   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
351   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
352   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
353   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
354   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
355   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
356   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
357   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
358   PetscFunctionReturn(0);
359 }
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "DMPlexDistributeData"
363 /*@
364   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
365 
366   Collective on DM
367 
368   Input Parameters:
369 + dm - The DMPlex object
370 . pointSF - The PetscSF describing the communication pattern
371 . originalSection - The PetscSection for existing data layout
372 . datatype - The type of data
373 - originalData - The existing data
374 
375   Output Parameters:
376 + newSection - The PetscSection describing the new data layout
377 - newData - The new data
378 
379   Level: developer
380 
381 .seealso: DMPlexDistribute(), DMPlexDistributeField()
382 @*/
383 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
384 {
385   PetscSF        fieldSF;
386   PetscInt      *remoteOffsets, fieldSize;
387   PetscMPIInt    dataSize;
388   PetscErrorCode ierr;
389 
390   PetscFunctionBegin;
391   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
392   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
393 
394   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
395   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
396   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
397 
398   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
399   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
400   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
401   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
402   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
403   PetscFunctionReturn(0);
404 }
405 
406 #undef __FUNCT__
407 #define __FUNCT__ "DMPlexDistribute"
408 /*@C
409   DMPlexDistribute - Distributes the mesh and any associated sections.
410 
411   Collective on DM
412 
413   Input Parameters:
414 + dm  - The original DMPlex object
415 . partitioner - The partitioning package, or NULL for the default
416 - overlap - The overlap of partitions, 0 is the default
417 
418   Output Parameters:
419 + sf - The PetscSF used for point distribution
420 - dmParallel - The distributed DMPlex object, or NULL
421 
422   Note: If the mesh was not distributed, the return value is NULL.
423 
424   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
425   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
426   representation on the mesh.
427 
428   Level: intermediate
429 
430 .keywords: mesh, elements
431 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
432 @*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart,        origPart,        cellPart,        part;
  PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF, coneSF;
  ISLocalToGlobalMapping renumbering;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets;
  PetscInt              *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  /* On a single process there is nothing to distribute; *dmParallel stays NULL */
  *dmParallel = NULL;
  if (numProcs == 1) PetscFunctionReturn(0);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size on the new mesh as we scan the new cone section */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Received cones carry old global numbers; map them to the new local numbering */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Orientations share the cone layout, so reuse coneSF for them */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  {
    /* Determine a unique owner rank for every shared point: leaves bid for points,
       a MAXLOC reduction picks the winner, and the result is broadcast back */
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origCellPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
      ierr = ISDestroy(&origPart);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);

    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    /* Points owned elsewhere become ghosts (leaves) in the parallel mesh's point SF */
    ierr = PetscMalloc1(numGhostPoints,    &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Propagate the adjacency flags to the new mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute Coordinates */
  {
    PetscSection     originalCoordSection, newCoordSection;
    Vec              originalCoordinates, newCoordinates;
    PetscInt         bs;
    const char      *name;
    const PetscReal *maxCell, *L;

    ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
    ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
    if (L) {ierr = DMSetPeriodicity(*dmParallel, maxCell, L);CHKERRQ(ierr);}
  }
  /* Distribute labels */
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  {
    /* NOTE(review): rank 0 drives the label iteration and broadcasts the count and
       the per-label depth test; assumes rank 0's label list is authoritative — confirm */
    DMLabel  next      = mesh->labels, newNext = pmesh->labels;
    PetscInt numLabels = 0, l;

    /* Bcast number of labels */
    while (next) {++numLabels; next = next->next;}
    ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    next = mesh->labels;
    for (l = 0; l < numLabels; ++l) {
      DMLabel   labelNew;
      PetscBool isdepth;

      /* Skip "depth" because it is recreated */
      if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
      ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
      if (isdepth) {if (!rank) next = next->next; continue;}
      ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
      /* Insert into list */
      if (newNext) newNext->next = labelNew;
      else         pmesh->labels = labelNew;
      newNext = labelNew;
      if (!rank) next = next->next;
    }
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  /* Setup hybrid structure */
  {
    /* Recompute the hybrid point boundaries in the new local numbering; appears to
       assume hybrid points occupy the tail of each stratum — TODO confirm */
    const PetscInt *gpoints;
    PetscInt        depth, n, d;

    for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
    ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    for (d = 0; d <= dim; ++d) {
      PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

      if (pmax < 0) continue;
      ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
      ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
      for (p = 0; p < n; ++p) {
        const PetscInt point = gpoints[p];

        if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
      }
      if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
      else            pmesh->hybridPointMax[d] = -1;
    }
    ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  }
  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Cleanup: hand the point SF to the caller if requested, else destroy it */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
726