1 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
2 #include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/
3 #include <petsc/private/partitionerimpl.h>
4
5 /*@C
6 DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
7
8 Input Parameters:
+ dm - The `DM` object
10 . user - The user callback, may be `NULL` (to clear the callback)
11 - ctx - context for callback evaluation, may be `NULL`
12
13 Level: advanced
14
15 Notes:
16 The caller of `DMPlexGetAdjacency()` may need to arrange that a large enough array is available for the adjacency.
17
18 Any setting here overrides other configuration of `DMPLEX` adjacency determination.
19
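  Example Usage:
  A minimal sketch of a user callback (the name `MyAdjacency` is a placeholder, not part of the API). On entry `*adjSize` holds the capacity of `adj`; the callback must fill `adj` and set `*adjSize` to the number of points written.
.vb
  // Hypothetical callback declaring each point adjacent only to itself
  static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
  {
    PetscFunctionBeginUser;
    PetscCheck(*adjSize >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
    adj[0]   = p;
    *adjSize = 1;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL));
.ve
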
20 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexGetAdjacencyUser()`
21 @*/
PetscErrorCode DMPlexSetAdjacencyUser(DM dm, PetscErrorCode (*user)(DM, PetscInt, PetscInt *, PetscInt[], void *), PetscCtx ctx)
23 {
24 DM_Plex *mesh = (DM_Plex *)dm->data;
25
26 PetscFunctionBegin;
27 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28 mesh->useradjacency = user;
29 mesh->useradjacencyctx = ctx;
30 PetscFunctionReturn(PETSC_SUCCESS);
31 }
32
33 /*@C
34 DMPlexGetAdjacencyUser - get the user-defined adjacency callback
35
36 Input Parameter:
37 . dm - The `DM` object
38
39 Output Parameters:
40 + user - The callback
41 - ctx - context for callback evaluation
42
43 Level: advanced
44
45 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexGetAdjacency()`, `DMPlexSetAdjacencyUser()`
46 @*/
PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM, PetscInt, PetscInt *, PetscInt[], void *), void **ctx)
48 {
49 DM_Plex *mesh = (DM_Plex *)dm->data;
50
51 PetscFunctionBegin;
52 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
53 if (user) *user = mesh->useradjacency;
54 if (ctx) *ctx = mesh->useradjacencyctx;
55 PetscFunctionReturn(PETSC_SUCCESS);
56 }
57
58 /*@
59 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
60
61 Input Parameters:
62 + dm - The `DM` object
- useAnchors - Flag to use the constraints. If `PETSC_TRUE`, then constrained points are omitted from `DMPlexGetAdjacency()`, and their anchor points appear in their place.
64
65 Level: intermediate
66
67 .seealso: `DMPLEX`, `DMGetAdjacency()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
68 @*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
70 {
71 DM_Plex *mesh = (DM_Plex *)dm->data;
72
73 PetscFunctionBegin;
74 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
75 mesh->useAnchors = useAnchors;
76 PetscFunctionReturn(PETSC_SUCCESS);
77 }
78
79 /*@
80 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
81
82 Input Parameter:
83 . dm - The `DM` object
84
85 Output Parameter:
. useAnchors - Flag to use the constraints. If `PETSC_TRUE`, then constrained points are omitted from `DMPlexGetAdjacency()`, and their anchor points appear in their place.
87
88 Level: intermediate
89
90 .seealso: `DMPLEX`, `DMPlexSetAdjacencyUseAnchors()`, `DMSetAdjacency()`, `DMGetAdjacency()`, `DMPlexDistribute()`, `DMPlexPreallocateOperator()`, `DMPlexSetAnchors()`
91 @*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
93 {
94 DM_Plex *mesh = (DM_Plex *)dm->data;
95
96 PetscFunctionBegin;
97 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
98 PetscAssertPointer(useAnchors, 2);
99 *useAnchors = mesh->useAnchors;
100 PetscFunctionReturn(PETSC_SUCCESS);
101 }
102
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
104 {
105 const PetscInt *cone = NULL;
106 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
107
108 PetscFunctionBeginHot;
109 PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
110 PetscCall(DMPlexGetCone(dm, p, &cone));
111 for (c = 0; c <= coneSize; ++c) {
112 const PetscInt point = !c ? p : cone[c - 1];
113 const PetscInt *support = NULL;
114 PetscInt supportSize, s, q;
115
116 PetscCall(DMPlexGetSupportSize(dm, point, &supportSize));
117 PetscCall(DMPlexGetSupport(dm, point, &support));
118 for (s = 0; s < supportSize; ++s) {
119 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]), 0); ++q) {
120 if (support[s] == adj[q]) break;
121 }
122 PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
123 }
124 }
125 *adjSize = numAdj;
126 PetscFunctionReturn(PETSC_SUCCESS);
127 }
128
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131 const PetscInt *support = NULL;
132 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
133
134 PetscFunctionBeginHot;
135 PetscCall(DMPlexGetSupportSize(dm, p, &supportSize));
136 PetscCall(DMPlexGetSupport(dm, p, &support));
137 for (s = 0; s <= supportSize; ++s) {
138 const PetscInt point = !s ? p : support[s - 1];
139 const PetscInt *cone = NULL;
140 PetscInt coneSize, c, q;
141
142 PetscCall(DMPlexGetConeSize(dm, point, &coneSize));
143 PetscCall(DMPlexGetCone(dm, point, &cone));
144 for (c = 0; c < coneSize; ++c) {
145 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]), 0); ++q) {
146 if (cone[c] == adj[q]) break;
147 }
148 PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
149 }
150 }
151 *adjSize = numAdj;
152 PetscFunctionReturn(PETSC_SUCCESS);
153 }
154
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
156 {
157 PetscInt *star = NULL;
158 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s;
159
160 PetscFunctionBeginHot;
161 PetscCall(DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star));
162 for (s = 0; s < starSize * 2; s += 2) {
163 const PetscInt *closure = NULL;
164 PetscInt closureSize, c, q;
165
166 PetscCall(DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
167 for (c = 0; c < closureSize * 2; c += 2) {
168 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]), 0); ++q) {
169 if (closure[c] == adj[q]) break;
170 }
171 PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
172 }
173 PetscCall(DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt **)&closure));
174 }
175 PetscCall(DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star));
176 *adjSize = numAdj;
177 PetscFunctionReturn(PETSC_SUCCESS);
178 }
179
180 // Returns the maximum number of adjacent points in the DMPlex
PetscErrorCode DMPlexGetMaxAdjacencySize_Internal(DM dm, PetscBool useAnchors, PetscInt *max_adjacency_size)
182 {
183 PetscInt depth, maxC, maxS, maxP, pStart, pEnd, asiz, maxAnchors = 1;
184
185 PetscFunctionBeginUser;
186 if (useAnchors) {
187 PetscSection aSec = NULL;
188 IS aIS = NULL;
189 PetscInt aStart, aEnd;
190 PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
191 if (aSec) {
192 PetscCall(PetscSectionGetMaxDof(aSec, &maxAnchors));
193 maxAnchors = PetscMax(1, maxAnchors);
194 PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
195 }
196 }
197
198 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
199 PetscCall(DMPlexGetDepth(dm, &depth));
200 depth = PetscMax(depth, -depth);
201 PetscCall(DMPlexGetMaxSizes(dm, &maxC, &maxS));
202 maxP = maxS * maxC;
  /* Adjacency can be as large as supp(cl(cell)) or cl(supp(vertex)),
         supp(cell) + supp(maxC faces) + supp(maxC^2 edges) + supp(maxC^3 vertices)
       = 0 + maxS*maxC + maxS^2*maxC^2 + maxS^3*maxC^3
       = \sum^d_{i=0} (maxS*maxC)^i - 1
       = ((maxS*maxC)^{d+1} - 1) / (maxS*maxC - 1) - 1
     We could improve this by getting the max by strata:
         supp[d](cell) + supp[d-1](maxC[d] faces) + supp[1](maxC[d]*maxC[d-1] edges) + supp[0](maxC[d]*maxC[d-1]*maxC[d-2] vertices)
       = 0 + maxS[d-1]*maxC[d] + maxS[1]*maxC[d]*maxC[d-1] + maxS[0]*maxC[d]*maxC[d-1]*maxC[d-2]
     and the same with S and C reversed
  */
213 if ((depth == 3 && maxP > 200) || (depth == 2 && maxP > 580)) asiz = pEnd - pStart;
214 else asiz = (maxP > 1) ? ((PetscPowInt(maxP, depth + 1) - 1) / (maxP - 1)) : depth + 1;
215 asiz *= maxAnchors;
216 *max_adjacency_size = PetscMin(asiz, pEnd - pStart);
217 PetscFunctionReturn(PETSC_SUCCESS);
218 }
219
// Returns the mesh points adjacent to the selected point, given specific criteria
221 //
222 // + adjSize - Number of adjacent points
223 // - adj - Array of the adjacent points
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
225 {
226 static PetscInt asiz = 0;
227 PetscInt aStart = -1, aEnd = -1;
228 PetscInt maxAdjSize;
229 PetscSection aSec = NULL;
230 IS aIS = NULL;
231 const PetscInt *anchors;
232 DM_Plex *mesh = (DM_Plex *)dm->data;
233
234 PetscFunctionBeginHot;
235 if (useAnchors) {
236 PetscCall(DMPlexGetAnchors(dm, &aSec, &aIS));
237 if (aSec) {
238 PetscCall(PetscSectionGetChart(aSec, &aStart, &aEnd));
239 PetscCall(ISGetIndices(aIS, &anchors));
240 }
241 }
242 if (!*adj) {
243 PetscCall(DMPlexGetMaxAdjacencySize_Internal(dm, useAnchors, &asiz));
244 PetscCall(PetscMalloc1(asiz, adj));
245 }
246 if (*adjSize < 0) *adjSize = asiz;
247 maxAdjSize = *adjSize;
248 if (mesh->useradjacency) {
249 PetscCall((*mesh->useradjacency)(dm, p, adjSize, *adj, mesh->useradjacencyctx));
250 } else if (useTransitiveClosure) {
251 PetscCall(DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj));
252 } else if (useCone) {
253 PetscCall(DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj));
254 } else {
255 PetscCall(DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj));
256 }
257 if (useAnchors && aSec) {
258 PetscInt origSize = *adjSize;
259 PetscInt numAdj = origSize;
260 PetscInt i = 0, j;
261 PetscInt *orig = *adj;
262
263 while (i < origSize) {
264 PetscInt p = orig[i];
265 PetscInt aDof = 0;
266
267 if (p >= aStart && p < aEnd) PetscCall(PetscSectionGetDof(aSec, p, &aDof));
268 if (aDof) {
269 PetscInt aOff;
270 PetscInt s, q;
271
272 for (j = i + 1; j < numAdj; j++) orig[j - 1] = orig[j];
273 origSize--;
274 numAdj--;
275 PetscCall(PetscSectionGetOffset(aSec, p, &aOff));
276 for (s = 0; s < aDof; ++s) {
277 for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff + s]), 0); ++q) {
278 if (anchors[aOff + s] == orig[q]) break;
279 }
280 PetscCheck(numAdj <= maxAdjSize, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%" PetscInt_FMT ")", maxAdjSize);
281 }
282 } else {
283 i++;
284 }
285 }
286 *adjSize = numAdj;
287 PetscCall(ISRestoreIndices(aIS, &anchors));
288 }
289 PetscFunctionReturn(PETSC_SUCCESS);
290 }
291
292 /*@
293 DMPlexGetAdjacency - Return all points adjacent to the given point
294
295 Input Parameters:
296 + dm - The `DM` object
297 - p - The point
298
299 Input/Output Parameters:
300 + adjSize - The maximum size of `adj` if it is non-`NULL`, or `PETSC_DETERMINE`;
301 on output the number of adjacent points
302 - adj - Either `NULL` so that the array is allocated, or an existing array with size `adjSize`;
303 on output contains the adjacent points
304
305 Level: advanced
306
307 Notes:
308 The user must `PetscFree()` the `adj` array if it was not passed in.
309
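  Example Usage:
  A typical call that lets the routine allocate the adjacency array, which the caller then frees.
.vb
  PetscInt *adj = NULL, adjSize = PETSC_DETERMINE;

  PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
  for (PetscInt a = 0; a < adjSize; ++a) {
    // use adj[a]
  }
  PetscCall(PetscFree(adj));
.ve
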
310 .seealso: `DMPLEX`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMCreateMatrix()`, `DMPlexPreallocateOperator()`
311 @*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
313 {
314 PetscBool useCone, useClosure, useAnchors;
315
316 PetscFunctionBeginHot;
317 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
318 PetscAssertPointer(adjSize, 3);
319 PetscAssertPointer(adj, 4);
320 PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
321 PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
322 PetscCall(DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj));
323 PetscFunctionReturn(PETSC_SUCCESS);
324 }
325
326 /*@
  DMPlexCreateTwoSidedProcessSF - Create a `PetscSF` which just has process connectivity
328
329 Collective
330
331 Input Parameters:
332 + dm - The `DM`
333 . sfPoint - The `PetscSF` which encodes point connectivity
334 . rootRankSection - to be documented
335 . rootRanks - to be documented
336 . leafRankSection - to be documented
337 - leafRanks - to be documented
338
339 Output Parameters:
340 + processRanks - A list of process neighbors, or `NULL`
- sfProcess - A `PetscSF` encoding the two-sided process connectivity, or `NULL`
342
343 Level: developer
344
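  Example Usage:
  A sketch, assuming the rank sections and index sets were produced by `DMPlexDistributeOwnership()` (their creation and destruction are omitted here).
.vb
  IS      processRanks;
  PetscSF sfProcess;

  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  PetscCall(DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, &processRanks, &sfProcess));
  // ... use the process connectivity ...
  PetscCall(ISDestroy(&processRanks));
  PetscCall(PetscSFDestroy(&sfProcess));
.ve
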
345 .seealso: `DMPLEX`, `PetscSFCreate()`, `DMPlexCreateProcessSF()`
346 @*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, PeOp IS *processRanks, PeOp PetscSF *sfProcess)
348 {
349 const PetscSFNode *remotePoints;
350 PetscInt *localPointsNew;
351 PetscSFNode *remotePointsNew;
352 const PetscInt *nranks;
353 PetscInt *ranksNew;
354 PetscBT neighbors;
355 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n;
356 PetscMPIInt size, proc, rank;
357
358 PetscFunctionBegin;
359 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
360 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
361 if (processRanks) PetscAssertPointer(processRanks, 7);
362 if (sfProcess) PetscAssertPointer(sfProcess, 8);
363 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
364 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
365 PetscCall(PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints));
366 PetscCall(PetscBTCreate(size, &neighbors));
367 PetscCall(PetscBTMemzero(size, neighbors));
368 /* Compute root-to-leaf process connectivity */
369 PetscCall(PetscSectionGetChart(rootRankSection, &pStart, &pEnd));
370 PetscCall(ISGetIndices(rootRanks, &nranks));
371 for (p = pStart; p < pEnd; ++p) {
372 PetscInt ndof, noff, n;
373
374 PetscCall(PetscSectionGetDof(rootRankSection, p, &ndof));
375 PetscCall(PetscSectionGetOffset(rootRankSection, p, &noff));
376 for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
377 }
378 PetscCall(ISRestoreIndices(rootRanks, &nranks));
379 /* Compute leaf-to-neighbor process connectivity */
380 PetscCall(PetscSectionGetChart(leafRankSection, &pStart, &pEnd));
381 PetscCall(ISGetIndices(leafRanks, &nranks));
382 for (p = pStart; p < pEnd; ++p) {
383 PetscInt ndof, noff, n;
384
385 PetscCall(PetscSectionGetDof(leafRankSection, p, &ndof));
386 PetscCall(PetscSectionGetOffset(leafRankSection, p, &noff));
387 for (n = 0; n < ndof; ++n) PetscCall(PetscBTSet(neighbors, nranks[noff + n]));
388 }
389 PetscCall(ISRestoreIndices(leafRanks, &nranks));
390 /* Compute leaf-to-root process connectivity */
391 for (l = 0; l < numLeaves; ++l) PetscCall(PetscBTSet(neighbors, remotePoints[l].rank));
392 /* Calculate edges */
393 PetscCall(PetscBTClear(neighbors, rank));
394 for (proc = 0, numNeighbors = 0; proc < size; ++proc) {
395 if (PetscBTLookup(neighbors, proc)) ++numNeighbors;
396 }
397 PetscCall(PetscMalloc1(numNeighbors, &ranksNew));
398 PetscCall(PetscMalloc1(numNeighbors, &localPointsNew));
399 PetscCall(PetscMalloc1(numNeighbors, &remotePointsNew));
400 for (proc = 0, n = 0; proc < size; ++proc) {
401 if (PetscBTLookup(neighbors, proc)) {
402 ranksNew[n] = proc;
403 localPointsNew[n] = proc;
404 remotePointsNew[n].index = rank;
405 remotePointsNew[n].rank = proc;
406 ++n;
407 }
408 }
409 PetscCall(PetscBTDestroy(&neighbors));
410 if (processRanks) PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks));
411 else PetscCall(PetscFree(ranksNew));
412 if (sfProcess) {
413 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess));
414 PetscCall(PetscObjectSetName((PetscObject)*sfProcess, "Two-Sided Process SF"));
415 PetscCall(PetscSFSetFromOptions(*sfProcess));
416 PetscCall(PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER));
417 }
418 PetscFunctionReturn(PETSC_SUCCESS);
419 }
420
421 /*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This essentially makes the one-sided information in the point `PetscSF` two-sided.
423
424 Collective
425
426 Input Parameter:
427 . dm - The `DM`
428
429 Output Parameters:
430 + rootSection - The number of leaves for a given root point
431 . rootrank - The rank of each edge into the root point
432 . leafSection - The number of processes sharing a given leaf point
433 - leafrank - The rank of each process sharing a leaf point
434
435 Level: developer
436
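  Example Usage:
  A sketch of the typical call sequence; the caller creates the two sections, the routine fills them and creates the two `IS` objects, and the caller destroys all four afterwards.
.vb
  PetscSection rootSection, leafSection;
  IS           rootrank, leafrank;

  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &rootSection));
  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection));
  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  // e.g. pass all four objects to DMPlexCreateOverlapLabel()
  PetscCall(ISDestroy(&rootrank));
  PetscCall(ISDestroy(&leafrank));
  PetscCall(PetscSectionDestroy(&rootSection));
  PetscCall(PetscSectionDestroy(&leafSection));
.ve
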
437 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
438 @*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
440 {
441 MPI_Comm comm;
442 PetscSF sfPoint;
443 const PetscInt *rootdegree;
444 PetscInt *myrank, *remoterank;
445 PetscInt pStart, pEnd, p, nedges;
446 PetscMPIInt rank;
447
448 PetscFunctionBegin;
449 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
450 PetscCallMPI(MPI_Comm_rank(comm, &rank));
451 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
452 PetscCall(DMGetPointSF(dm, &sfPoint));
453 /* Compute number of leaves for each root */
454 PetscCall(PetscObjectSetName((PetscObject)rootSection, "Root Section"));
455 PetscCall(PetscSectionSetChart(rootSection, pStart, pEnd));
456 PetscCall(PetscSFComputeDegreeBegin(sfPoint, &rootdegree));
457 PetscCall(PetscSFComputeDegreeEnd(sfPoint, &rootdegree));
458 for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(rootSection, p, rootdegree[p - pStart]));
459 PetscCall(PetscSectionSetUp(rootSection));
460 /* Gather rank of each leaf to root */
461 PetscCall(PetscSectionGetStorageSize(rootSection, &nedges));
462 PetscCall(PetscMalloc1(pEnd - pStart, &myrank));
463 PetscCall(PetscMalloc1(nedges, &remoterank));
464 for (p = 0; p < pEnd - pStart; ++p) myrank[p] = rank;
465 PetscCall(PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank));
466 PetscCall(PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank));
467 PetscCall(PetscFree(myrank));
468 PetscCall(ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank));
469 /* Distribute remote ranks to leaves */
470 PetscCall(PetscObjectSetName((PetscObject)leafSection, "Leaf Section"));
471 PetscCall(DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank));
472 PetscFunctionReturn(PETSC_SUCCESS);
473 }
474
475 /*@
476 DMPlexCreateOverlapLabel - Compute a label indicating what overlap points should be sent to new processes
477
478 Collective
479
480 Input Parameters:
481 + dm - The `DM`
482 . levels - Number of overlap levels
483 . rootSection - The number of leaves for a given root point
484 . rootrank - The rank of each edge into the root point
485 . leafSection - The number of processes sharing a given leaf point
486 - leafrank - The rank of each process sharing a leaf point
487
488 Output Parameter:
489 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
490
491 Level: developer
492
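  Example Usage:
  A sketch that builds a one-level overlap label from the ownership information computed by `DMPlexDistributeOwnership()` (section and `IS` management omitted).
.vb
  DMLabel ovLabel;

  PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
  PetscCall(DMPlexCreateOverlapLabel(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel));
  // ovLabel now lists point/rank pairs to be donated during overlap distribution
  PetscCall(DMLabelDestroy(&ovLabel));
.ve
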
493 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabelFromLabels()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
494 @*/
PetscErrorCode DMPlexCreateOverlapLabel(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
496 {
497 MPI_Comm comm;
498 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
499 PetscSF sfPoint;
500 const PetscSFNode *remote;
501 const PetscInt *local;
502 const PetscInt *nrank, *rrank;
503 PetscInt *adj = NULL;
504 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l;
505 PetscMPIInt rank, size;
506 PetscBool flg;
507
508 PetscFunctionBegin;
509 *ovLabel = NULL;
510 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
511 PetscCallMPI(MPI_Comm_size(comm, &size));
512 PetscCallMPI(MPI_Comm_rank(comm, &rank));
513 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
514 PetscCall(DMGetPointSF(dm, &sfPoint));
515 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
516 if (!levels) {
517 /* Add owned points */
518 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
519 for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
520 PetscFunctionReturn(PETSC_SUCCESS);
521 }
522 PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
523 PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
524 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
525 /* Handle leaves: shared with the root point */
526 for (l = 0; l < nleaves; ++l) {
527 PetscInt adjSize = PETSC_DETERMINE, a;
528
529 PetscCall(DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj));
530 for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank));
531 }
532 PetscCall(ISGetIndices(rootrank, &rrank));
533 PetscCall(ISGetIndices(leafrank, &nrank));
534 /* Handle roots */
535 for (p = pStart; p < pEnd; ++p) {
536 PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
537
538 if ((p >= sStart) && (p < sEnd)) {
539 /* Some leaves share a root with other leaves on different processes */
540 PetscCall(PetscSectionGetDof(leafSection, p, &neighbors));
541 if (neighbors) {
542 PetscCall(PetscSectionGetOffset(leafSection, p, &noff));
543 PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
544 for (n = 0; n < neighbors; ++n) {
545 const PetscInt remoteRank = nrank[noff + n];
546
547 if (remoteRank == rank) continue;
548 for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
549 }
550 }
551 }
552 /* Roots are shared with leaves */
553 PetscCall(PetscSectionGetDof(rootSection, p, &neighbors));
554 if (!neighbors) continue;
555 PetscCall(PetscSectionGetOffset(rootSection, p, &noff));
556 PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
557 for (n = 0; n < neighbors; ++n) {
558 const PetscInt remoteRank = rrank[noff + n];
559
560 if (remoteRank == rank) continue;
561 for (a = 0; a < adjSize; ++a) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
562 }
563 }
564 PetscCall(PetscFree(adj));
565 PetscCall(ISRestoreIndices(rootrank, &rrank));
566 PetscCall(ISRestoreIndices(leafrank, &nrank));
567 /* Add additional overlap levels */
568 for (l = 1; l < levels; l++) {
569 /* Propagate point donations over SF to capture remote connections */
570 PetscCall(DMPlexPartitionLabelPropagate(dm, ovAdjByRank));
571 /* Add next level of point donations to the label */
572 PetscCall(DMPlexPartitionLabelAdjacency(dm, ovAdjByRank));
573 }
574 /* We require the closure in the overlap */
575 PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
576 PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
577 if (flg) {
578 PetscViewer viewer;
579 PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
580 PetscCall(DMLabelView(ovAdjByRank, viewer));
581 }
582 /* Invert sender to receiver label */
583 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
584 PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
585 /* Add owned points, except for shared local points */
586 for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
587 for (l = 0; l < nleaves; ++l) {
588 PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
589 PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
590 }
591 /* Clean up */
592 PetscCall(DMLabelDestroy(&ovAdjByRank));
593 PetscFunctionReturn(PETSC_SUCCESS);
594 }
595
static PetscErrorCode HandlePoint_Private(DM dm, PetscInt p, PetscSection section, const PetscInt ranks[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], DMLabel ovAdjByRank)
597 {
598 PetscInt neighbors, el;
599
600 PetscFunctionBegin;
601 PetscCall(PetscSectionGetDof(section, p, &neighbors));
602 if (neighbors) {
603 PetscInt *adj = NULL;
604 PetscInt adjSize = PETSC_DETERMINE, noff, n, a;
605 PetscMPIInt rank;
606
607 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
608 PetscCall(PetscSectionGetOffset(section, p, &noff));
609 PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
610 for (n = 0; n < neighbors; ++n) {
611 const PetscInt remoteRank = ranks[noff + n];
612
613 if (remoteRank == rank) continue;
614 for (a = 0; a < adjSize; ++a) {
615 PetscBool insert = PETSC_TRUE;
616
617 for (el = 0; el < numExLabels; ++el) {
618 PetscInt exVal;
619 PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
620 if (exVal == exValue[el]) {
621 insert = PETSC_FALSE;
622 break;
623 }
624 }
625 if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
626 }
627 }
628 PetscCall(PetscFree(adj));
629 }
630 PetscFunctionReturn(PETSC_SUCCESS);
631 }
632
633 /*@C
634 DMPlexCreateOverlapLabelFromLabels - Compute a label indicating what overlap points should be sent to new processes
635
636 Collective
637
638 Input Parameters:
639 + dm - The `DM`
640 . numLabels - The number of labels to draw candidate points from
641 . label - An array of labels containing candidate points
642 . value - An array of label values marking the candidate points
643 . numExLabels - The number of labels to use for exclusion
644 . exLabel - An array of labels indicating points to be excluded, or `NULL`
645 . exValue - An array of label values to be excluded, or `NULL`
646 . rootSection - The number of leaves for a given root point
647 . rootrank - The rank of each edge into the root point
648 . leafSection - The number of processes sharing a given leaf point
649 - leafrank - The rank of each process sharing a leaf point
650
651 Output Parameter:
652 . ovLabel - `DMLabel` containing remote overlap contributions as point/rank pairings
653
654 Level: developer
655
656 Note:
657 The candidate points are only added to the overlap if they are adjacent to a shared point
658
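  Example Usage:
  A sketch drawing candidates from a single label (the label `candLabel` and value 1 are hypothetical) with no exclusions; the ownership inputs are assumed to come from `DMPlexDistributeOwnership()`.
.vb
  DMLabel  ovLabel;
  DMLabel  labels[1] = {candLabel};
  PetscInt values[1] = {1};

  PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, 1, labels, values, 0, NULL, NULL, rootSection, rootrank, leafSection, leafrank, &ovLabel));
  PetscCall(DMLabelDestroy(&ovLabel));
.ve
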
659 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexGetAdjacency()`, `DMPlexDistributeOwnership()`, `DMPlexDistribute()`
660 @*/
PetscErrorCode DMPlexCreateOverlapLabelFromLabels(DM dm, PetscInt numLabels, const DMLabel label[], const PetscInt value[], PetscInt numExLabels, const DMLabel exLabel[], const PetscInt exValue[], PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
662 {
663 MPI_Comm comm;
664 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
665 PetscSF sfPoint;
666 const PetscSFNode *remote;
667 const PetscInt *local;
668 const PetscInt *nrank, *rrank;
669 PetscInt *adj = NULL;
670 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l, el;
671 PetscMPIInt rank, size;
672 PetscBool flg;
673
674 PetscFunctionBegin;
675 *ovLabel = NULL;
676 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
677 PetscCallMPI(MPI_Comm_size(comm, &size));
678 PetscCallMPI(MPI_Comm_rank(comm, &rank));
679 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
680 PetscCall(DMGetPointSF(dm, &sfPoint));
681 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
682 PetscCall(PetscSectionGetChart(leafSection, &sStart, &sEnd));
683 PetscCall(PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote));
684 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank));
685 PetscCall(ISGetIndices(rootrank, &rrank));
686 PetscCall(ISGetIndices(leafrank, &nrank));
687 for (l = 0; l < numLabels; ++l) {
688 IS valIS;
689 const PetscInt *points;
690 PetscInt n;
691
692 PetscCall(DMLabelGetStratumIS(label[l], value[l], &valIS));
693 if (!valIS) continue;
694 PetscCall(ISGetIndices(valIS, &points));
695 PetscCall(ISGetLocalSize(valIS, &n));
696 for (PetscInt i = 0; i < n; ++i) {
697 const PetscInt p = points[i];
698
699 if ((p >= sStart) && (p < sEnd)) {
700 PetscInt loc, adjSize = PETSC_DETERMINE;
701
702 /* Handle leaves: shared with the root point */
703 if (local) PetscCall(PetscFindInt(p, nleaves, local, &loc));
704 else loc = (p >= 0 && p < nleaves) ? p : -1;
705 if (loc >= 0) {
706 const PetscInt remoteRank = remote[loc].rank;
707
708 PetscCall(DMPlexGetAdjacency(dm, p, &adjSize, &adj));
709 for (PetscInt a = 0; a < adjSize; ++a) {
710 PetscBool insert = PETSC_TRUE;
711
712 for (el = 0; el < numExLabels; ++el) {
713 PetscInt exVal;
714 PetscCall(DMLabelGetValue(exLabel[el], adj[a], &exVal));
715 if (exVal == exValue[el]) {
716 insert = PETSC_FALSE;
717 break;
718 }
719 }
720 if (insert) PetscCall(DMLabelSetValue(ovAdjByRank, adj[a], remoteRank));
721 }
722 }
723 /* Some leaves share a root with other leaves on different processes */
724 PetscCall(HandlePoint_Private(dm, p, leafSection, nrank, numExLabels, exLabel, exValue, ovAdjByRank));
725 }
726 /* Roots are shared with leaves */
727 PetscCall(HandlePoint_Private(dm, p, rootSection, rrank, numExLabels, exLabel, exValue, ovAdjByRank));
728 }
729 PetscCall(ISRestoreIndices(valIS, &points));
730 PetscCall(ISDestroy(&valIS));
731 }
732 PetscCall(PetscFree(adj));
733 PetscCall(ISRestoreIndices(rootrank, &rrank));
734 PetscCall(ISRestoreIndices(leafrank, &nrank));
735 /* We require the closure in the overlap */
736 PetscCall(DMPlexPartitionLabelClosure(dm, ovAdjByRank));
737 PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-overlap_view", &flg));
738 if (flg) {
739 PetscViewer viewer;
740 PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer));
741 PetscCall(DMLabelView(ovAdjByRank, viewer));
742 }
743 /* Invert sender to receiver label */
744 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel));
745 PetscCall(DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel));
746 /* Add owned points, except for shared local points */
747 for (p = pStart; p < pEnd; ++p) PetscCall(DMLabelSetValue(*ovLabel, p, rank));
748 for (l = 0; l < nleaves; ++l) {
749 PetscCall(DMLabelClearValue(*ovLabel, local[l], rank));
750 PetscCall(DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank));
751 }
752 /* Clean up */
753 PetscCall(DMLabelDestroy(&ovAdjByRank));
754 PetscFunctionReturn(PETSC_SUCCESS);
755 }
756
757 /*@
758 DMPlexCreateOverlapMigrationSF - Create a `PetscSF` describing the new mesh distribution to make the overlap described by the input `PetscSF`
759
760 Collective
761
762 Input Parameters:
763 + dm - The `DM`
764 - overlapSF - The `PetscSF` mapping ghost points in overlap to owner points on other processes
765
766 Output Parameter:
767 . migrationSF - A `PetscSF` that maps original points in old locations to points in new locations
768
769 Level: developer
770
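  Example Usage:
  A sketch; `overlapSF` is assumed to map the requested overlap points to their owning processes, for instance built from an overlap label.
.vb
  PetscSF migrationSF;

  PetscCall(DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF));
  // use migrationSF to migrate mesh data, e.g. with DMPlexMigrate(), then release it
  PetscCall(PetscSFDestroy(&migrationSF));
.ve
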
771 .seealso: `DMPLEX`, `DMPlexCreateOverlapLabel()`, `DMPlexDistributeOverlap()`, `DMPlexDistribute()`
772 @*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
774 {
775 MPI_Comm comm;
776 PetscMPIInt rank, size;
777 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
778 PetscInt *pointDepths, *remoteDepths, *ilocal;
779 PetscInt *depthRecv, *depthShift, *depthIdx;
780 PetscSFNode *iremote;
781 PetscSF pointSF;
782 const PetscInt *sharedLocal;
783 const PetscSFNode *overlapRemote, *sharedRemote;
784
785 PetscFunctionBegin;
786 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
787 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
788 PetscCallMPI(MPI_Comm_rank(comm, &rank));
789 PetscCallMPI(MPI_Comm_size(comm, &size));
790 PetscCall(DMGetDimension(dm, &dim));
791
792 /* Before building the migration SF we need to know the new stratum offsets */
793 PetscCall(PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote));
794 PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
795 for (d = 0; d < dim + 1; d++) {
796 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
797 for (p = pStart; p < pEnd; p++) pointDepths[p] = d;
798 }
799 for (p = 0; p < nleaves; p++) remoteDepths[p] = -1;
800 PetscCall(PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
801 PetscCall(PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths, MPI_REPLACE));
802
803 /* Count received points in each stratum and compute the internal strata shift */
804 PetscCall(PetscMalloc3(dim + 1, &depthRecv, dim + 1, &depthShift, dim + 1, &depthIdx));
805 for (d = 0; d < dim + 1; d++) depthRecv[d] = 0;
806 for (p = 0; p < nleaves; p++) depthRecv[remoteDepths[p]]++;
807 depthShift[dim] = 0;
808 for (d = 0; d < dim; d++) depthShift[d] = depthRecv[dim];
809 for (d = 1; d < dim; d++) depthShift[d] += depthRecv[0];
810 for (d = dim - 2; d > 0; d--) depthShift[d] += depthRecv[d + 1];
811 for (d = 0; d < dim + 1; d++) {
812 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
813 depthIdx[d] = pStart + depthShift[d];
814 }
815
  /* From the overlap SF, build an SF that describes the full overlap migration */
817 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
818 newLeaves = pEnd - pStart + nleaves;
819 PetscCall(PetscMalloc1(newLeaves, &ilocal));
820 PetscCall(PetscMalloc1(newLeaves, &iremote));
821 /* First map local points to themselves */
822 for (d = 0; d < dim + 1; d++) {
823 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
824 for (p = pStart; p < pEnd; p++) {
825 point = p + depthShift[d];
826 ilocal[point] = point;
827 iremote[point].index = p;
828 iremote[point].rank = rank;
829 depthIdx[d]++;
830 }
831 }
832
833 /* Add in the remote roots for currently shared points */
834 PetscCall(DMGetPointSF(dm, &pointSF));
835 PetscCall(PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote));
836 for (d = 0; d < dim + 1; d++) {
837 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
838 for (p = 0; p < numSharedPoints; p++) {
839 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
840 point = sharedLocal[p] + depthShift[d];
841 iremote[point].index = sharedRemote[p].index;
842 iremote[point].rank = sharedRemote[p].rank;
843 }
844 }
845 }
846
847 /* Now add the incoming overlap points */
848 for (p = 0; p < nleaves; p++) {
849 point = depthIdx[remoteDepths[p]];
850 ilocal[point] = point;
851 iremote[point].index = overlapRemote[p].index;
852 iremote[point].rank = overlapRemote[p].rank;
853 depthIdx[remoteDepths[p]]++;
854 }
855 PetscCall(PetscFree2(pointDepths, remoteDepths));
856
857 PetscCall(PetscSFCreate(comm, migrationSF));
858 PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Overlap Migration SF"));
859 PetscCall(PetscSFSetFromOptions(*migrationSF));
860 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
861 PetscCall(PetscSFSetGraph(*migrationSF, pEnd - pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
862
863 PetscCall(PetscFree3(depthRecv, depthShift, depthIdx));
864 PetscFunctionReturn(PETSC_SUCCESS);
865 }
866
867 /*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration `PetscSF` for stratification.

  Input Parameters:
+ dm - The `DM`
- sf - A star forest with non-ordered leaves, usually defining a `DM` point migration
873
874 Output Parameter:
875 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
876
877 Level: developer
878
879 Note:
880 This lexicographically sorts by (depth, cellType)
881
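  Example Usage:
  A sketch that replaces an unstratified migration `PetscSF` with its stratified counterpart.
.vb
  PetscSF stratSF;

  PetscCall(DMPlexStratifyMigrationSF(dm, migrationSF, &stratSF));
  PetscCall(PetscSFDestroy(&migrationSF));
  migrationSF = stratSF;
.ve
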
882 .seealso: `DMPLEX`, `DMPlexPartitionLabelCreateSF()`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
883 @*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
885 {
886 MPI_Comm comm;
887 PetscMPIInt rank, size;
888 PetscInt d, ldepth, depth, dim, p, pStart, pEnd, nroots, nleaves;
889 PetscSFNode *pointDepths, *remoteDepths;
890 PetscInt *ilocal;
891 PetscInt *depthRecv, *depthShift, *depthIdx;
892 PetscInt *ctRecv, *ctShift, *ctIdx;
893 const PetscSFNode *iremote;
894
895 PetscFunctionBegin;
896 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
897 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
898 PetscCallMPI(MPI_Comm_rank(comm, &rank));
899 PetscCallMPI(MPI_Comm_size(comm, &size));
900 PetscCall(DMPlexGetDepth(dm, &ldepth));
901 PetscCall(DMGetDimension(dm, &dim));
902 PetscCallMPI(MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm));
903 PetscCheck(!(ldepth >= 0) || !(depth != ldepth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, ldepth, depth);
904 PetscCall(PetscLogEventBegin(DMPLEX_PartStratSF, dm, 0, 0, 0));
905
906 /* Before building the migration SF we need to know the new stratum offsets */
907 PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote));
908 PetscCall(PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths));
909 for (d = 0; d < depth + 1; ++d) {
910 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
911 for (p = pStart; p < pEnd; ++p) {
912 DMPolytopeType ct;
913
914 PetscCall(DMPlexGetCellType(dm, p, &ct));
915 pointDepths[p].index = d;
916 pointDepths[p].rank = ct;
917 }
918 }
919 for (p = 0; p < nleaves; ++p) {
920 remoteDepths[p].index = -1;
921 remoteDepths[p].rank = -1;
922 }
923 PetscCall(PetscSFBcastBegin(sf, MPIU_SF_NODE, pointDepths, remoteDepths, MPI_REPLACE));
924 PetscCall(PetscSFBcastEnd(sf, MPIU_SF_NODE, pointDepths, remoteDepths, MPI_REPLACE));
925 /* Count received points in each stratum and compute the internal strata shift */
926 PetscCall(PetscCalloc6(depth + 1, &depthRecv, depth + 1, &depthShift, depth + 1, &depthIdx, DM_NUM_POLYTOPES, &ctRecv, DM_NUM_POLYTOPES, &ctShift, DM_NUM_POLYTOPES, &ctIdx));
927 for (p = 0; p < nleaves; ++p) {
928 if (remoteDepths[p].rank < 0) {
929 ++depthRecv[remoteDepths[p].index];
930 } else {
931 ++ctRecv[remoteDepths[p].rank];
932 }
933 }
934 {
935 PetscInt depths[4], dims[4], shift = 0, i, c;
936
937 /* Cells (depth), Vertices (0), Faces (depth-1), Edges (1)
938 Consider DM_POLYTOPE_FV_GHOST, DM_POLYTOPE_INTERIOR_GHOST and DM_POLYTOPE_UNKNOWN_CELL as cells
939 */
940 depths[0] = depth;
941 depths[1] = 0;
942 depths[2] = depth - 1;
943 depths[3] = 1;
944 dims[0] = dim;
945 dims[1] = 0;
946 dims[2] = dim - 1;
947 dims[3] = 1;
948 for (i = 0; i <= depth; ++i) {
949 const PetscInt dep = depths[i];
950 const PetscInt dim = dims[i];
951
952 for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
953 if (DMPolytopeTypeGetDim((DMPolytopeType)c) != dim && !(i == 0 && (c == DM_POLYTOPE_FV_GHOST || c == DM_POLYTOPE_INTERIOR_GHOST || c == DM_POLYTOPE_UNKNOWN_CELL))) continue;
954 ctShift[c] = shift;
955 shift += ctRecv[c];
956 }
957 depthShift[dep] = shift;
958 shift += depthRecv[dep];
959 }
960 for (c = 0; c < DM_NUM_POLYTOPES; ++c) {
961 const PetscInt ctDim = DMPolytopeTypeGetDim((DMPolytopeType)c);
962
963 if ((ctDim < 0 || ctDim > dim) && c != DM_POLYTOPE_FV_GHOST && c != DM_POLYTOPE_INTERIOR_GHOST && c != DM_POLYTOPE_UNKNOWN_CELL) {
964 ctShift[c] = shift;
965 shift += ctRecv[c];
966 }
967 }
968 }
969 /* Derive a new local permutation based on stratified indices */
970 PetscCall(PetscMalloc1(nleaves, &ilocal));
971 for (p = 0; p < nleaves; ++p) {
972 const DMPolytopeType ct = (DMPolytopeType)remoteDepths[p].rank;
973
974 ilocal[p] = ctShift[ct] + ctIdx[ct];
975 ++ctIdx[ct];
976 }
977 PetscCall(PetscSFCreate(comm, migrationSF));
978 PetscCall(PetscObjectSetName((PetscObject)*migrationSF, "Migration SF"));
979 PetscCall(PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, (PetscSFNode *)iremote, PETSC_COPY_VALUES));
980 PetscCall(PetscFree2(pointDepths, remoteDepths));
981 PetscCall(PetscFree6(depthRecv, depthShift, depthIdx, ctRecv, ctShift, ctIdx));
982 PetscCall(PetscLogEventEnd(DMPLEX_PartStratSF, dm, 0, 0, 0));
983 PetscFunctionReturn(PETSC_SUCCESS);
984 }
985
986 /*@
987 DMPlexDistributeField - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
988
989 Collective
990
991 Input Parameters:
992 + dm - The `DMPLEX` object
993 . pointSF - The `PetscSF` describing the communication pattern
994 . originalSection - The `PetscSection` for existing data layout
995 - originalVec - The existing data in a local vector
996
997 Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
999 - newVec - The new data in a local vector
1000
1001 Level: developer
1002
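  Example Usage:
  A sketch mirroring how this file distributes coordinates; the caller creates the output section and vector, and this routine sets their layout, type, and values.
.vb
  PetscSection newSection;
  Vec          newVec;

  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &newSection));
  PetscCall(VecCreate(PETSC_COMM_SELF, &newVec));
  PetscCall(DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec));
  // ... use newSection/newVec, then destroy them when done ...
.ve
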
1003 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeFieldIS()`, `DMPlexDistributeData()`, `PetscSectionMigrateData()`
1004 @*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
1006 {
1007 PetscSF fieldSF;
1008 PetscInt *remoteOffsets, fieldSize;
1009 PetscScalar *originalValues, *newValues;
1010
1011 PetscFunctionBegin;
1012 PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1013 PetscCall(PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection));
1014
1015 PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1016 PetscCall(VecSetSizes(newVec, fieldSize, PETSC_DETERMINE));
1017 PetscCall(VecSetType(newVec, dm->vectype));
1018
1019 PetscCall(VecGetArray(originalVec, &originalValues));
1020 PetscCall(VecGetArray(newVec, &newValues));
1021 PetscCall(PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF));
1022 PetscCall(PetscFree(remoteOffsets));
1023 PetscCall(PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1024 PetscCall(PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues, MPI_REPLACE));
1025 PetscCall(PetscSFDestroy(&fieldSF));
1026 PetscCall(VecRestoreArray(newVec, &newValues));
1027 PetscCall(VecRestoreArray(originalVec, &originalValues));
1028 PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1029 PetscFunctionReturn(PETSC_SUCCESS);
1030 }
1031
1032 /*@
1033 DMPlexDistributeFieldIS - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1034
1035 Collective
1036
1037 Input Parameters:
1038 + dm - The `DMPLEX` object
1039 . pointSF - The `PetscSF` describing the communication pattern
1040 . originalSection - The `PetscSection` for existing data layout
1041 - originalIS - The existing data
1042
1043 Output Parameters:
+ newSection - The `PetscSection` describing the new data layout
1045 - newIS - The new data
1046
1047 Level: developer
1048
1049 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`, `DMPlexDistributeData()`, `PetscSectionMigrateData()`
1050 @*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
1052 {
1053 PetscInt *newValues, fieldSize;
1054 const PetscInt *originalValues;
1055
1056 PetscFunctionBegin;
1057 PetscCall(PetscLogEventBegin(DMPLEX_DistributeField, dm, 0, 0, 0));
1058 PetscCall(ISGetIndices(originalIS, &originalValues));
1059 PetscCall(PetscSectionMigrateData(pointSF, MPIU_INT, originalSection, originalValues, newSection, (void **)&newValues, NULL));
1060 PetscCall(ISRestoreIndices(originalIS, &originalValues));
1061
1062 PetscCall(PetscSectionGetStorageSize(newSection, &fieldSize));
1063 PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS));
1064 PetscCall(PetscLogEventEnd(DMPLEX_DistributeField, dm, 0, 0, 0));
1065 PetscFunctionReturn(PETSC_SUCCESS);
1066 }
1067
1068 /*@
1069 DMPlexDistributeData - Distribute field data to match a given `PetscSF`, usually the `PetscSF` from mesh distribution
1070
1071 Collective
1072
1073 Input Parameters:
1074 + dm - The `DMPLEX` object
1075 . pointSF - The `PetscSF` describing the communication pattern
1076 . originalSection - The `PetscSection` for existing data layout
1077 . datatype - The type of data
1078 - originalData - The existing data
1079
1080 Output Parameters:
1081 + newSection - The `PetscSection` describing the new data layout
1082 - newData - The new data
1083
1084 Level: developer
1085
1086 Note:
1087 This is simply a wrapper around `PetscSectionMigrateData()`, but includes DM-specific logging.
1088
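  Example Usage:
  A sketch distributing integer data; the output array is allocated by the call, and the caller is responsible for freeing it.
.vb
  PetscSection newSection;
  PetscInt    *newData;

  PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &newSection));
  PetscCall(DMPlexDistributeData(dm, pointSF, originalSection, MPIU_INT, originalData, newSection, (void **)&newData));
  // ... use newData ...
  PetscCall(PetscFree(newData));
.ve
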
1089 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexDistributeField()`, `PetscSectionMigrateData()`
1090 @*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1092 {
1093 PetscFunctionBegin;
1094 PetscCall(PetscLogEventBegin(DMPLEX_DistributeData, dm, 0, 0, 0));
1095 PetscCall(PetscSectionMigrateData(pointSF, datatype, originalSection, originalData, newSection, newData, NULL));
1096 PetscCall(PetscLogEventEnd(DMPLEX_DistributeData, dm, 0, 0, 0));
1097 PetscFunctionReturn(PETSC_SUCCESS);
1098 }
1099
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1101 {
1102 DM_Plex *pmesh = (DM_Plex *)dmParallel->data;
1103 MPI_Comm comm;
1104 PetscSF coneSF;
1105 PetscSection originalConeSection, newConeSection;
1106 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1107 PetscBool flg;
1108
1109 PetscFunctionBegin;
1110 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1111 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1112 PetscCall(PetscLogEventBegin(DMPLEX_DistributeCones, dm, 0, 0, 0));
1113 /* Distribute cone section */
1114 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1115 PetscCall(DMPlexGetConeSection(dm, &originalConeSection));
1116 PetscCall(DMPlexGetConeSection(dmParallel, &newConeSection));
1117 PetscCall(PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection));
1118 PetscCall(DMSetUp(dmParallel));
1119 /* Communicate and renumber cones */
1120 PetscCall(PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF));
1121 PetscCall(PetscFree(remoteOffsets));
1122 PetscCall(DMPlexGetCones(dm, &cones));
1123 if (original) {
1124 PetscInt numCones;
1125
1126 PetscCall(PetscSectionGetStorageSize(originalConeSection, &numCones));
1127 PetscCall(PetscMalloc1(numCones, &globCones));
1128 PetscCall(ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones));
1129 } else {
1130 globCones = cones;
1131 }
1132 PetscCall(DMPlexGetCones(dmParallel, &newCones));
1133 PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
1134 PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones, MPI_REPLACE));
1135 if (original) PetscCall(PetscFree(globCones));
1136 PetscCall(PetscSectionGetStorageSize(newConeSection, &newConesSize));
1137 PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones));
1138 if (PetscDefined(USE_DEBUG)) {
1139 PetscInt p;
1140 PetscBool valid = PETSC_TRUE;
1141 for (p = 0; p < newConesSize; ++p) {
1142 if (newCones[p] < 0) {
1143 valid = PETSC_FALSE;
1144 PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] Point %" PetscInt_FMT " not in overlap SF\n", PetscGlobalRank, p));
1145 }
1146 }
1147 PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1148 }
1149 PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-cones_view", &flg));
1150 if (flg) {
1151 PetscCall(PetscPrintf(comm, "Serial Cone Section:\n"));
1152 PetscCall(PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm)));
1153 PetscCall(PetscPrintf(comm, "Parallel Cone Section:\n"));
1154 PetscCall(PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm)));
1155 PetscCall(PetscSFView(coneSF, NULL));
1156 }
1157 PetscCall(DMPlexGetConeOrientations(dm, &cones));
1158 PetscCall(DMPlexGetConeOrientations(dmParallel, &newCones));
1159 PetscCall(PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
1160 PetscCall(PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones, MPI_REPLACE));
1161 PetscCall(PetscSFDestroy(&coneSF));
1162 PetscCall(PetscLogEventEnd(DMPLEX_DistributeCones, dm, 0, 0, 0));
1163 /* Create supports and stratify DMPlex */
1164 {
1165 PetscInt pStart, pEnd;
1166
1167 PetscCall(PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd));
1168 PetscCall(PetscSectionSetChart(pmesh->supportSection, pStart, pEnd));
1169 }
1170 PetscCall(DMPlexSymmetrize(dmParallel));
1171 PetscCall(DMPlexStratify(dmParallel));
1172 {
1173 PetscBool useCone, useClosure, useAnchors;
1174
1175 PetscCall(DMGetBasicAdjacency(dm, &useCone, &useClosure));
1176 PetscCall(DMSetBasicAdjacency(dmParallel, useCone, useClosure));
1177 PetscCall(DMPlexGetAdjacencyUseAnchors(dm, &useAnchors));
1178 PetscCall(DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors));
1179 }
1180 PetscFunctionReturn(PETSC_SUCCESS);
1181 }
1182
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1184 {
1185 MPI_Comm comm;
1186 DM cdm, cdmParallel;
1187 PetscSection originalCoordSection, newCoordSection;
1188 Vec originalCoordinates, newCoordinates;
1189 PetscInt bs;
1190 const char *name;
1191 const PetscReal *maxCell, *Lstart, *L;
1192
1193 PetscFunctionBegin;
1194 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1195 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1196
1197 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1198 PetscCall(DMGetCoordinateDM(dm, &cdm));
1199 PetscCall(DMGetCoordinateDM(dmParallel, &cdmParallel));
1200 PetscCall(DMCopyDisc(cdm, cdmParallel));
1201 PetscCall(DMGetCoordinateSection(dm, &originalCoordSection));
1202 PetscCall(DMGetCoordinateSection(dmParallel, &newCoordSection));
1203 PetscCall(DMGetCoordinatesLocal(dm, &originalCoordinates));
1204 if (originalCoordinates) {
1205 PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1206 PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1207 PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1208
1209 PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1210 PetscCall(DMSetCoordinatesLocal(dmParallel, newCoordinates));
1211 PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1212 PetscCall(VecSetBlockSize(newCoordinates, bs));
1213 PetscCall(VecDestroy(&newCoordinates));
1214 }
1215
1216 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1217 PetscCall(DMGetPeriodicity(dm, &maxCell, &Lstart, &L));
1218 PetscCall(DMSetPeriodicity(dmParallel, maxCell, Lstart, L));
1219 PetscCall(DMGetCellCoordinateDM(dm, &cdm));
1220 if (cdm) {
1221 PetscCall(DMClone(dmParallel, &cdmParallel));
1222 PetscCall(DMSetCellCoordinateDM(dmParallel, cdmParallel));
1223 PetscCall(DMCopyDisc(cdm, cdmParallel));
1224 PetscCall(DMDestroy(&cdmParallel));
1225 PetscCall(DMGetCellCoordinateSection(dm, &originalCoordSection));
1226 PetscCall(DMGetCellCoordinatesLocal(dm, &originalCoordinates));
1227 PetscCall(PetscSectionCreate(comm, &newCoordSection));
1228 if (originalCoordinates) {
1229 PetscCall(VecCreate(PETSC_COMM_SELF, &newCoordinates));
1230 PetscCall(PetscObjectGetName((PetscObject)originalCoordinates, &name));
1231 PetscCall(PetscObjectSetName((PetscObject)newCoordinates, name));
1232
1233 PetscCall(DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates));
1234 PetscCall(VecGetBlockSize(originalCoordinates, &bs));
1235 PetscCall(VecSetBlockSize(newCoordinates, bs));
1236 PetscCall(DMSetCellCoordinateSection(dmParallel, bs, newCoordSection));
1237 PetscCall(DMSetCellCoordinatesLocal(dmParallel, newCoordinates));
1238 PetscCall(VecDestroy(&newCoordinates));
1239 }
1240 PetscCall(PetscSectionDestroy(&newCoordSection));
1241 }
1242 PetscFunctionReturn(PETSC_SUCCESS);
1243 }
1244
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1246 {
1247 DM_Plex *mesh = (DM_Plex *)dm->data;
1248 MPI_Comm comm;
1249 DMLabel depthLabel;
1250 PetscMPIInt rank;
1251 PetscInt depth, d, numLabels, numLocalLabels, l;
1252 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1253 PetscObjectState depthState = -1;
1254
1255 PetscFunctionBegin;
1256 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1257 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1258
1259 PetscCall(PetscLogEventBegin(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1260 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1261 PetscCallMPI(MPI_Comm_rank(comm, &rank));
1262
1263 /* If the user has changed the depth label, communicate it instead */
1264 PetscCall(DMPlexGetDepth(dm, &depth));
1265 PetscCall(DMPlexGetDepthLabel(dm, &depthLabel));
1266 if (depthLabel) PetscCall(PetscObjectStateGet((PetscObject)depthLabel, &depthState));
1267 lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1268 PetscCallMPI(MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPI_C_BOOL, MPI_LOR, comm));
1269 if (sendDepth) {
1270 PetscCall(DMPlexGetDepthLabel(dmParallel, &dmParallel->depthLabel));
1271 PetscCall(DMRemoveLabelBySelf(dmParallel, &dmParallel->depthLabel, PETSC_FALSE));
1272 }
1273 /* Everyone must have either the same number of labels, or none */
1274 PetscCall(DMGetNumLabels(dm, &numLocalLabels));
1275 numLabels = numLocalLabels;
1276 PetscCallMPI(MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm));
1277 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1278 for (l = 0; l < numLabels; ++l) {
1279 DMLabel label = NULL, labelNew = NULL;
1280 PetscBool isDepth, lisOutput = PETSC_TRUE, isOutput;
1281 const char *name = NULL;
1282
1283 if (hasLabels) {
1284 PetscCall(DMGetLabelByNum(dm, l, &label));
1285 /* Skip "depth" because it is recreated */
1286 PetscCall(PetscObjectGetName((PetscObject)label, &name));
1287 PetscCall(PetscStrcmp(name, "depth", &isDepth));
1288 } else {
1289 isDepth = PETSC_FALSE;
1290 }
1291 PetscCallMPI(MPI_Bcast(&isDepth, 1, MPI_C_BOOL, 0, comm));
1292 if (isDepth && !sendDepth) continue;
1293 PetscCall(DMLabelDistribute(label, migrationSF, &labelNew));
1294 if (isDepth) {
1295 /* Put in any missing strata which can occur if users are managing the depth label themselves */
1296 PetscInt gdepth;
1297
1298 PetscCallMPI(MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm));
1299 PetscCheck(!(depth >= 0) || !(gdepth != depth), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %" PetscInt_FMT " != %" PetscInt_FMT, depth, gdepth);
1300 for (d = 0; d <= gdepth; ++d) {
1301 PetscBool has;
1302
1303 PetscCall(DMLabelHasStratum(labelNew, d, &has));
1304 if (!has) PetscCall(DMLabelAddStratum(labelNew, d));
1305 }
1306 }
1307 PetscCall(DMAddLabel(dmParallel, labelNew));
1308 /* Put the output flag in the new label */
1309 if (hasLabels) PetscCall(DMGetLabelOutput(dm, name, &lisOutput));
1310 PetscCallMPI(MPIU_Allreduce(&lisOutput, &isOutput, 1, MPI_C_BOOL, MPI_LAND, comm));
1311 PetscCall(PetscObjectGetName((PetscObject)labelNew, &name));
1312 PetscCall(DMSetLabelOutput(dmParallel, name, isOutput));
1313 PetscCall(DMLabelDestroy(&labelNew));
1314 }
1315 {
1316 DMLabel ctLabel;
1317
1318 // Reset label for fast lookup
1319 PetscCall(DMPlexGetCellTypeLabel(dmParallel, &ctLabel));
1320 PetscCall(DMLabelMakeAllInvalid_Internal(ctLabel));
1321 }
1322 PetscCall(PetscLogEventEnd(DMPLEX_DistributeLabels, dm, 0, 0, 0));
1323 PetscFunctionReturn(PETSC_SUCCESS);
1324 }
1325
1326 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1327 {
1328 DM_Plex *mesh = (DM_Plex *)dm->data;
1329 DM_Plex *pmesh = (DM_Plex *)dmParallel->data;
1330 MPI_Comm comm;
1331 DM refTree;
1332 PetscSection origParentSection, newParentSection;
1333 PetscInt *origParents, *origChildIDs;
1334 PetscBool flg;
1335
1336 PetscFunctionBegin;
1337 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1338 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1339 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1340
1341 /* Set up tree */
1342 PetscCall(DMPlexGetReferenceTree(dm, &refTree));
1343 PetscCall(DMPlexSetReferenceTree(dmParallel, refTree));
1344 PetscCall(DMPlexGetTree(dm, &origParentSection, &origParents, &origChildIDs, NULL, NULL));
1345 if (origParentSection) {
1346 PetscInt pStart, pEnd;
1347 PetscInt *newParents, *newChildIDs, *globParents;
1348 PetscInt *remoteOffsetsParents, newParentSize;
1349 PetscSF parentSF;
1350
1351 PetscCall(DMPlexGetChart(dmParallel, &pStart, &pEnd));
1352 PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel), &newParentSection));
1353 PetscCall(PetscSectionSetChart(newParentSection, pStart, pEnd));
1354 PetscCall(PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection));
1355 PetscCall(PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF));
1356 PetscCall(PetscFree(remoteOffsetsParents));
1357 PetscCall(PetscSectionGetStorageSize(newParentSection, &newParentSize));
1358 PetscCall(PetscMalloc2(newParentSize, &newParents, newParentSize, &newChildIDs));
1359 if (original) {
1360 PetscInt numParents;
1361
1362 PetscCall(PetscSectionGetStorageSize(origParentSection, &numParents));
1363 PetscCall(PetscMalloc1(numParents, &globParents));
1364 PetscCall(ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents));
1365 } else {
1366 globParents = origParents;
1367 }
1368 PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1369 PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents, MPI_REPLACE));
1370 if (original) PetscCall(PetscFree(globParents));
1371 PetscCall(PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1372 PetscCall(PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs, MPI_REPLACE));
1373 PetscCall(ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents));
1374 if (PetscDefined(USE_DEBUG)) {
1375 PetscInt p;
1376 PetscBool valid = PETSC_TRUE;
1377 for (p = 0; p < newParentSize; ++p) {
1378 if (newParents[p] < 0) {
1379 valid = PETSC_FALSE;
1380 PetscCall(PetscPrintf(PETSC_COMM_SELF, "Point %" PetscInt_FMT " not in overlap SF\n", p));
1381 }
1382 }
1383 PetscCheck(valid, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1384 }
1385 PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-parents_view", &flg));
1386 if (flg) {
1387 PetscCall(PetscPrintf(comm, "Serial Parent Section: \n"));
1388 PetscCall(PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm)));
1389 PetscCall(PetscPrintf(comm, "Parallel Parent Section: \n"));
1390 PetscCall(PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm)));
1391 PetscCall(PetscSFView(parentSF, NULL));
1392 }
1393 PetscCall(DMPlexSetTree(dmParallel, newParentSection, newParents, newChildIDs));
1394 PetscCall(PetscSectionDestroy(&newParentSection));
1395 PetscCall(PetscFree2(newParents, newChildIDs));
1396 PetscCall(PetscSFDestroy(&parentSF));
1397 }
1398 pmesh->useAnchors = mesh->useAnchors;
1399 PetscFunctionReturn(PETSC_SUCCESS);
1400 }
1401
1402 /*@
1403 DMPlexSetPartitionBalance - Should distribution of the `DM` attempt to balance the shared point partition?
1404
1405 Input Parameters:
1406 + dm - The `DMPLEX` object
1407 - flg - Balance the partition?
1408
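  Example Usage:
  A minimal sketch (the name `dmDist` is illustrative); request balanced ownership of shared points before distributing:
.vb
  DM dmDist = NULL;

  PetscCall(DMPlexSetPartitionBalance(dm, PETSC_TRUE));
  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
.ve
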
1409 Level: intermediate
1410
1411 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetPartitionBalance()`
1412 @*/
1413 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1414 {
1415 DM_Plex *mesh = (DM_Plex *)dm->data;
1416
1417 PetscFunctionBegin;
1418 mesh->partitionBalance = flg;
1419 PetscFunctionReturn(PETSC_SUCCESS);
1420 }
1421
1422 /*@
1423 DMPlexGetPartitionBalance - Does distribution of the `DM` attempt to balance the shared point partition?
1424
1425 Input Parameter:
1426 . dm - The `DMPLEX` object
1427
1428 Output Parameter:
1429 . flg - Balance the partition?
1430
1431 Level: intermediate
1432
1433 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexSetPartitionBalance()`
1434 @*/
1435 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1436 {
1437 DM_Plex *mesh = (DM_Plex *)dm->data;
1438
1439 PetscFunctionBegin;
1440 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1441 PetscAssertPointer(flg, 2);
1442 *flg = mesh->partitionBalance;
1443 PetscFunctionReturn(PETSC_SUCCESS);
1444 }
1445
1446 typedef struct {
1447 PetscInt vote, rank, index;
1448 } Petsc3Int;
1449
1450 /* MaxLoc, but carry a third piece of information around */
1451 static void MPIAPI MaxLocCarry(void *in_, void *inout_, PetscMPIInt *len_, MPI_Datatype *dtype)
1452 {
1453 Petsc3Int *a = (Petsc3Int *)inout_;
1454 Petsc3Int *b = (Petsc3Int *)in_;
1455 PetscInt i, len = *len_;
1456 for (i = 0; i < len; i++) {
1457 if (a[i].vote < b[i].vote) {
1458 a[i].vote = b[i].vote;
1459 a[i].rank = b[i].rank;
1460 a[i].index = b[i].index;
1461 } else if (a[i].vote <= b[i].vote) {
1462 if (a[i].rank >= b[i].rank) {
1463 a[i].rank = b[i].rank;
1464 a[i].index = b[i].index;
1465 }
1466 }
1467 }
1468 }
1469
1470 /*@
1471 DMPlexCreatePointSF - Build a point `PetscSF` from a `PetscSF` describing a point migration
1472
1473 Input Parameters:
1474 + dm - The source `DMPLEX` object
1475 . migrationSF - The star forest that describes the parallel point remapping
1476 - ownership - Flag causing a vote to determine point ownership
1477
1478 Output Parameter:
1479 . pointSF - The star forest describing the point overlap in the remapped `DM`
1480
1481 Level: developer
1482
1483 Note:
1484 Output `pointSF` is guaranteed to have the array of local indices (leaves) sorted.
1485
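  Example Usage:
  A minimal sketch; `dmParallel` and `sfMigration` are illustrative names for a freshly migrated `DM` and the migration `PetscSF` that produced it:
.vb
  PetscSF pointSF;

  PetscCall(DMPlexCreatePointSF(dmParallel, sfMigration, PETSC_TRUE, &pointSF));
  PetscCall(DMSetPointSF(dmParallel, pointSF));
  PetscCall(PetscSFDestroy(&pointSF));
.ve
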
1486 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1487 @*/
1488 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1489 {
1490 PetscMPIInt rank, size;
1491 PetscInt p, nroots, nleaves, idx, npointLeaves;
1492 PetscInt *pointLocal;
1493 const PetscInt *leaves;
1494 const PetscSFNode *roots;
1495 PetscSFNode *rootNodes, *leafNodes, *pointRemote;
1496 Vec shifts;
1497 const PetscInt numShifts = 13759;
1498 const PetscScalar *shift = NULL;
1499 const PetscBool shiftDebug = PETSC_FALSE;
1500 PetscBool balance;
1501
1502 PetscFunctionBegin;
1503 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1504 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
1505 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
1506 PetscCall(PetscLogEventBegin(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1507 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)dm), pointSF));
1508 PetscCall(PetscSFSetFromOptions(*pointSF));
1509 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1510
1511 PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1512 PetscCall(PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots));
1513 PetscCall(PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes));
1514 if (ownership) {
1515 MPI_Op op;
1516 MPI_Datatype datatype;
1517 Petsc3Int *rootVote = NULL, *leafVote = NULL;
1518
1519 /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1520 if (balance) {
1521 PetscRandom r;
1522
1523 PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
1524 PetscCall(PetscRandomSetInterval(r, 0, 2467 * size));
1525 PetscCall(VecCreate(PETSC_COMM_SELF, &shifts));
1526 PetscCall(VecSetSizes(shifts, numShifts, numShifts));
1527 PetscCall(VecSetType(shifts, VECSTANDARD));
1528 PetscCall(VecSetRandom(shifts, r));
1529 PetscCall(PetscRandomDestroy(&r));
1530 PetscCall(VecGetArrayRead(shifts, &shift));
1531 }
1532
1533 PetscCall(PetscMalloc1(nroots, &rootVote));
1534 PetscCall(PetscMalloc1(nleaves, &leafVote));
1535 /* Point ownership vote: Process with highest rank owns shared points */
1536 for (p = 0; p < nleaves; ++p) {
1537 if (shiftDebug) {
1538 PetscCall(PetscSynchronizedPrintf(PetscObjectComm((PetscObject)dm), "[%d] Point %" PetscInt_FMT " RemotePoint %" PetscInt_FMT " Shift %" PetscInt_FMT " MyRank %" PetscInt_FMT "\n", rank, leaves ? leaves[p] : p, roots[p].index,
1539 (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]), (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size));
1540 }
1541 /* Either put in a bid or we know we own it */
1542 leafVote[p].vote = (rank + (shift ? (PetscInt)PetscRealPart(shift[roots[p].index % numShifts]) : 0)) % size;
1543 leafVote[p].rank = rank;
1544 leafVote[p].index = p;
1545 }
1546 for (p = 0; p < nroots; p++) {
1547 /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1548 rootVote[p].vote = -3;
1549 rootVote[p].rank = -3;
1550 rootVote[p].index = -3;
1551 }
1552 PetscCallMPI(MPI_Type_contiguous(3, MPIU_INT, &datatype));
1553 PetscCallMPI(MPI_Type_commit(&datatype));
1554 PetscCallMPI(MPI_Op_create(&MaxLocCarry, 1, &op));
1555 PetscCall(PetscSFReduceBegin(migrationSF, datatype, leafVote, rootVote, op));
1556 PetscCall(PetscSFReduceEnd(migrationSF, datatype, leafVote, rootVote, op));
1557 PetscCallMPI(MPI_Op_free(&op));
1558 PetscCallMPI(MPI_Type_free(&datatype));
1559 for (p = 0; p < nroots; p++) {
1560 rootNodes[p].rank = rootVote[p].rank;
1561 rootNodes[p].index = rootVote[p].index;
1562 }
1563 PetscCall(PetscFree(leafVote));
1564 PetscCall(PetscFree(rootVote));
1565 } else {
1566 for (p = 0; p < nroots; p++) {
1567 rootNodes[p].index = -1;
1568 rootNodes[p].rank = rank;
1569 }
1570 for (p = 0; p < nleaves; p++) {
1571 /* Write new local id into old location */
1572 if (roots[p].rank == rank) rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1573 }
1574 }
1575 PetscCall(PetscSFBcastBegin(migrationSF, MPIU_SF_NODE, rootNodes, leafNodes, MPI_REPLACE));
1576 PetscCall(PetscSFBcastEnd(migrationSF, MPIU_SF_NODE, rootNodes, leafNodes, MPI_REPLACE));
1577
1578 for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1579 if (leafNodes[p].rank != rank) npointLeaves++;
1580 }
1581 PetscCall(PetscMalloc1(npointLeaves, &pointLocal));
1582 PetscCall(PetscMalloc1(npointLeaves, &pointRemote));
1583 for (idx = 0, p = 0; p < nleaves; p++) {
1584 if (leafNodes[p].rank != rank) {
1585 /* Note that pointLocal is automatically sorted as it is sublist of 0, ..., nleaves-1 */
1586 pointLocal[idx] = p;
1587 pointRemote[idx] = leafNodes[p];
1588 idx++;
1589 }
1590 }
1591 if (shift) {
1592 PetscCall(VecRestoreArrayRead(shifts, &shift));
1593 PetscCall(VecDestroy(&shifts));
1594 }
1595 if (shiftDebug) PetscCall(PetscSynchronizedFlush(PetscObjectComm((PetscObject)dm), PETSC_STDOUT));
1596 PetscCall(PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER));
1597 PetscCall(PetscFree2(rootNodes, leafNodes));
1598 PetscCall(PetscLogEventEnd(DMPLEX_CreatePointSF, dm, 0, 0, 0));
1599 if (PetscDefined(USE_DEBUG)) PetscCall(DMPlexCheckPointSF(dm, *pointSF, PETSC_FALSE));
1600 PetscFunctionReturn(PETSC_SUCCESS);
1601 }
1602
1603 /*@
1604 DMPlexMigrate - Migrates internal `DM` data over the supplied star forest
1605
1606 Collective
1607
1608 Input Parameters:
1609 + dm - The source `DMPLEX` object
1610 - sf - The star forest communication context describing the migration pattern
1611
1612 Output Parameter:
1613 . targetDM - The target `DMPLEX` object
1614
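  Example Usage:
  A minimal sketch; `sf` is assumed to be a migration star forest, such as one produced by `DMPlexPartitionLabelCreateSF()`:
.vb
  DM dmTarget;

  PetscCall(DMPlexCreate(PetscObjectComm((PetscObject)dm), &dmTarget));
  PetscCall(DMPlexMigrate(dm, sf, dmTarget));
.ve
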
1615 Level: intermediate
1616
1617 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`
1618 @*/
1619 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1620 {
1621 MPI_Comm comm;
1622 PetscInt dim, cdim, nroots;
1623 PetscSF sfPoint;
1624 ISLocalToGlobalMapping ltogMigration;
1625 ISLocalToGlobalMapping ltogOriginal = NULL;
1626
1627 PetscFunctionBegin;
1628 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1629 PetscCall(PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0));
1630 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1631 PetscCall(DMGetDimension(dm, &dim));
1632 PetscCall(DMSetDimension(targetDM, dim));
1633 PetscCall(DMGetCoordinateDim(dm, &cdim));
1634 PetscCall(DMSetCoordinateDim(targetDM, cdim));
1635
1636 /* Check for a one-to-all distribution pattern */
1637 PetscCall(DMGetPointSF(dm, &sfPoint));
1638 PetscCall(PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL));
1639 if (nroots >= 0) {
1640 IS isOriginal;
1641 PetscInt n, size, nleaves;
1642 PetscInt *numbering_orig, *numbering_new;
1643
1644 /* Get the original point numbering */
1645 PetscCall(DMPlexCreatePointNumbering(dm, &isOriginal));
1646 PetscCall(ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal));
1647 PetscCall(ISLocalToGlobalMappingGetSize(ltogOriginal, &size));
1648 PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1649 /* Convert to positive global numbers */
1650 for (n = 0; n < size; n++) {
1651 if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n] + 1);
1652 }
1653 /* Derive the new local-to-global mapping from the old one */
1654 PetscCall(PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL));
1655 PetscCall(PetscMalloc1(nleaves, &numbering_new));
1656 PetscCall(PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1657 PetscCall(PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new, MPI_REPLACE));
1658 PetscCall(ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration));
1659 PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt **)&numbering_orig));
1660 PetscCall(ISDestroy(&isOriginal));
1661 } else {
1662 /* One-to-all distribution pattern: We can derive LToG from SF */
1663 PetscCall(ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration));
1664 }
1665 PetscCall(PetscObjectSetName((PetscObject)ltogMigration, "Point renumbering for DM migration"));
1666 PetscCall(ISLocalToGlobalMappingViewFromOptions(ltogMigration, (PetscObject)dm, "-partition_view"));
1667 /* Migrate DM data to target DM */
1668 PetscCall(DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM));
1669 PetscCall(DMPlexDistributeLabels(dm, sf, targetDM));
1670 PetscCall(DMPlexDistributeCoordinates(dm, sf, targetDM));
1671 PetscCall(DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM));
1672 PetscCall(ISLocalToGlobalMappingDestroy(&ltogOriginal));
1673 PetscCall(ISLocalToGlobalMappingDestroy(&ltogMigration));
1674 PetscCall(PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0));
1675 PetscFunctionReturn(PETSC_SUCCESS);
1676 }
1677
1678 /*@
1679 DMPlexRemapMigrationSF - Rewrite the distribution SF to account for overlap
1680
1681 Collective
1682
1683 Input Parameters:
1684 + sfOverlap - The `PetscSF` object just for the overlap
1685 - sfMigration - The original distribution `PetscSF` object
1686
1687 Output Parameter:
1688 . sfMigrationNew - A rewritten `PetscSF` object that incorporates the overlap
1689
1690 Level: developer
1691
1692 .seealso: `DMPLEX`, `DM`, `DMPlexDistribute()`, `DMPlexDistributeOverlap()`, `DMPlexGetOverlap()`
1693 @*/
1694 PetscErrorCode DMPlexRemapMigrationSF(PetscSF sfOverlap, PetscSF sfMigration, PetscSF *sfMigrationNew)
1695 {
1696 PetscSFNode *newRemote, *permRemote = NULL;
1697 const PetscInt *oldLeaves;
1698 const PetscSFNode *oldRemote;
1699 PetscInt nroots, nleaves, noldleaves;
1700
1701 PetscFunctionBegin;
1702 PetscCall(PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote));
1703 PetscCall(PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL));
1704 PetscCall(PetscMalloc1(nleaves, &newRemote));
1705 /* oldRemote: original root point mapping to original leaf point
1706 newRemote: original leaf point mapping to overlapped leaf point */
1707 if (oldLeaves) {
1708 /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1709 PetscCall(PetscMalloc1(noldleaves, &permRemote));
1710 for (PetscInt l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1711 oldRemote = permRemote;
1712 }
1713 PetscCall(PetscSFBcastBegin(sfOverlap, MPIU_SF_NODE, oldRemote, newRemote, MPI_REPLACE));
1714 PetscCall(PetscSFBcastEnd(sfOverlap, MPIU_SF_NODE, oldRemote, newRemote, MPI_REPLACE));
1715 PetscCall(PetscFree(permRemote));
1716 PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)sfOverlap), sfMigrationNew));
1717 PetscCall(PetscSFSetGraph(*sfMigrationNew, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER));
1718 PetscFunctionReturn(PETSC_SUCCESS);
1719 }
1720
1721 /* For DG-like methods, the code below is equivalent to (but faster than) calling
1722 DMPlexCreateClosureIndex(dm,section) */
1723 static PetscErrorCode DMPlexCreateClosureIndex_CELL(DM dm, PetscSection section)
1724 {
1725 PetscSection clSection;
1726 IS clPoints;
1727 PetscInt pStart, pEnd, point;
1728 PetscInt *closure, pos = 0;
1729
1730 PetscFunctionBegin;
1731 if (!section) PetscCall(DMGetLocalSection(dm, &section));
1732 PetscCall(DMPlexGetHeightStratum(dm, 0, &pStart, &pEnd));
1733 PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &clSection));
1734 PetscCall(PetscSectionSetChart(clSection, pStart, pEnd));
1735 PetscCall(PetscMalloc1((2 * (pEnd - pStart)), &closure));
1736 for (point = pStart; point < pEnd; point++) {
1737 PetscCall(PetscSectionSetDof(clSection, point, 2));
1738 closure[pos++] = point; /* point */
1739 closure[pos++] = 0; /* orientation */
1740 }
1741 PetscCall(PetscSectionSetUp(clSection));
1742 PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2 * (pEnd - pStart), closure, PETSC_OWN_POINTER, &clPoints));
1743 PetscCall(PetscSectionSetClosureIndex(section, (PetscObject)dm, clSection, clPoints));
1744 PetscCall(PetscSectionDestroy(&clSection));
1745 PetscCall(ISDestroy(&clPoints));
1746 PetscFunctionReturn(PETSC_SUCCESS);
1747 }
1748
1749 static PetscErrorCode DMPlexDistribute_Multistage(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1750 {
1751 MPI_Comm comm = PetscObjectComm((PetscObject)dm);
1752 PetscPartitioner part;
1753 PetscBool balance, printHeader;
1754 PetscInt nl = 0;
1755
1756 PetscFunctionBegin;
1757 if (sf) *sf = NULL;
1758 *dmParallel = NULL;
1759
1760 PetscCall(DMPlexGetPartitioner(dm, &part));
1761 printHeader = part->printHeader;
1762 PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1763 PetscCall(PetscPartitionerSetUp(part));
1764 PetscCall(PetscLogEventBegin(DMPLEX_DistributeMultistage, dm, 0, 0, 0));
1765 PetscCall(PetscPartitionerMultistageGetStages_Multistage(part, &nl, NULL));
1766 for (PetscInt l = 0; l < nl; l++) {
1767 PetscInt ovl = (l < nl - 1) ? 0 : overlap;
1768 PetscSF sfDist;
1769 DM dmDist;
1770
1771 PetscCall(DMPlexSetPartitionBalance(dm, balance));
1772 PetscCall(DMViewFromOptions(dm, (PetscObject)part, "-petscpartitioner_multistage_dm_view"));
1773 PetscCall(PetscPartitionerMultistageSetStage_Multistage(part, l, (PetscObject)dm));
1774 PetscCall(DMPlexSetPartitioner(dm, part));
1775 PetscCall(DMPlexDistribute(dm, ovl, &sfDist, &dmDist));
1776 PetscCheck(dmDist, comm, PETSC_ERR_PLIB, "No distributed DM generated (stage %" PetscInt_FMT ")", l);
1777 PetscCheck(sfDist, comm, PETSC_ERR_PLIB, "No SF generated (stage %" PetscInt_FMT ")", l);
1778 part->printHeader = PETSC_FALSE;
1779
1780 /* Propagate cell weights to the next level (if any, and if not the final dm) */
1781 if (part->usevwgt && dm->localSection && l != nl - 1) {
1782 PetscSection oldSection, newSection;
1783
1784 PetscCall(DMGetLocalSection(dm, &oldSection));
1785 PetscCall(DMGetLocalSection(dmDist, &newSection));
1786 PetscCall(PetscSFDistributeSection(sfDist, oldSection, NULL, newSection));
1787 PetscCall(DMPlexCreateClosureIndex_CELL(dmDist, newSection));
1788 }
1789 if (!sf) PetscCall(PetscSFDestroy(&sfDist));
1790 if (l > 0) PetscCall(DMDestroy(&dm));
1791
1792 if (sf && *sf) {
1793 PetscSF sfA = *sf, sfB = sfDist;
1794 PetscCall(PetscSFCompose(sfA, sfB, &sfDist));
1795 PetscCall(PetscSFDestroy(&sfA));
1796 PetscCall(PetscSFDestroy(&sfB));
1797 }
1798
1799 if (sf) *sf = sfDist;
1800 dm = *dmParallel = dmDist;
1801 }
1802 PetscCall(PetscPartitionerMultistageSetStage_Multistage(part, 0, NULL)); /* reset */
1803 PetscCall(PetscLogEventEnd(DMPLEX_DistributeMultistage, dm, 0, 0, 0));
1804 part->printHeader = printHeader;
1805 PetscFunctionReturn(PETSC_SUCCESS);
1806 }
1807
1808 /*@
1809 DMPlexDistribute - Distributes the mesh and any associated sections.
1810
1811 Collective
1812
1813 Input Parameters:
1814 + dm - The original `DMPLEX` object
1815 - overlap - The overlap of partitions, 0 is the default
1816
1817 Output Parameters:
1818 + sf - The `PetscSF` used for point distribution, or `NULL` if not needed
1819 - dmParallel - The distributed `DMPLEX` object
1820
1821 Level: intermediate
1822
1823 Note:
1824 If the mesh was not distributed, the output `dmParallel` will be `NULL`.
1825
1826 The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
1827 representation on the mesh.
1828
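  Example Usage:
  A minimal sketch of the common redistribution idiom (`dmDist` and `sfDist` are illustrative names):
.vb
  DM      dmDist = NULL;
  PetscSF sfDist = NULL;

  PetscCall(DMPlexDistribute(dm, 1, &sfDist, &dmDist));
  if (dmDist) { /* NULL if the mesh was not distributed, e.g. on a single rank */
    PetscCall(DMDestroy(&dm));
    dm = dmDist;
  }
  PetscCall(PetscSFDestroy(&sfDist));
.ve
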
1829 .seealso: `DMPLEX`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexGetOverlap()`
1830 @*/
1831 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PeOp PetscSF *sf, DM *dmParallel)
1832 {
1833 MPI_Comm comm;
1834 PetscPartitioner partitioner;
1835 IS cellPart;
1836 PetscSection cellPartSection;
1837 DM dmCoord;
1838 DMLabel lblPartition, lblMigration;
1839 PetscSF sfMigration, sfStratified, sfPoint;
1840 PetscBool flg, balance, isms;
1841 PetscMPIInt rank, size;
1842
1843 PetscFunctionBegin;
1844 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1845 PetscValidLogicalCollectiveInt(dm, overlap, 2);
1846 if (sf) PetscAssertPointer(sf, 3);
1847 PetscAssertPointer(dmParallel, 4);
1848
1849 if (sf) *sf = NULL;
1850 *dmParallel = NULL;
1851 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1852 PetscCallMPI(MPI_Comm_rank(comm, &rank));
1853 PetscCallMPI(MPI_Comm_size(comm, &size));
1854 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
1855
1856 /* Handle multistage partitioner */
1857 PetscCall(DMPlexGetPartitioner(dm, &partitioner));
1858 PetscCall(PetscObjectTypeCompare((PetscObject)partitioner, PETSCPARTITIONERMULTISTAGE, &isms));
1859 if (isms) {
1860 PetscObject stagedm;
1861
1862 PetscCall(PetscPartitionerMultistageGetStage_Multistage(partitioner, NULL, &stagedm));
1863 if (!stagedm) { /* No stage dm present, start the multistage algorithm */
1864 PetscCall(DMPlexDistribute_Multistage(dm, overlap, sf, dmParallel));
1865 PetscFunctionReturn(PETSC_SUCCESS);
1866 }
1867 }
1868 PetscCall(PetscLogEventBegin(DMPLEX_Distribute, dm, 0, 0, 0));
1869 /* Create cell partition */
1870 PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
1871 PetscCall(PetscSectionCreate(comm, &cellPartSection));
1872 PetscCall(PetscPartitionerDMPlexPartition(partitioner, dm, NULL, cellPartSection, &cellPart));
1873 PetscCall(PetscLogEventBegin(DMPLEX_PartSelf, dm, 0, 0, 0));
1874 {
1875 /* Convert partition to DMLabel */
1876 IS is;
1877 PetscHSetI ht;
1878 const PetscInt *points;
1879 PetscInt *iranks;
1880 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks;
1881
1882 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition));
1883 /* Preallocate strata */
1884 PetscCall(PetscHSetICreate(&ht));
1885 PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1886 for (proc = pStart; proc < pEnd; proc++) {
1887 PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1888 if (npoints) PetscCall(PetscHSetIAdd(ht, proc));
1889 }
1890 PetscCall(PetscHSetIGetSize(ht, &nranks));
1891 PetscCall(PetscMalloc1(nranks, &iranks));
1892 PetscCall(PetscHSetIGetElems(ht, &poff, iranks));
1893 PetscCall(PetscHSetIDestroy(&ht));
1894 PetscCall(DMLabelAddStrata(lblPartition, nranks, iranks));
1895 PetscCall(PetscFree(iranks));
1896 /* Inline DMPlexPartitionLabelClosure() */
1897 PetscCall(ISGetIndices(cellPart, &points));
1898 PetscCall(PetscSectionGetChart(cellPartSection, &pStart, &pEnd));
1899 for (proc = pStart; proc < pEnd; proc++) {
1900 PetscCall(PetscSectionGetDof(cellPartSection, proc, &npoints));
1901 if (!npoints) continue;
1902 PetscCall(PetscSectionGetOffset(cellPartSection, proc, &poff));
1903 PetscCall(DMPlexClosurePoints_Private(dm, npoints, points + poff, &is));
1904 PetscCall(DMLabelSetStratumIS(lblPartition, proc, is));
1905 PetscCall(ISDestroy(&is));
1906 }
1907 PetscCall(ISRestoreIndices(cellPart, &points));
1908 }
1909 PetscCall(PetscLogEventEnd(DMPLEX_PartSelf, dm, 0, 0, 0));
1910
1911 PetscCall(DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration));
1912 PetscCall(DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration));
1913 PetscCall(DMPlexPartitionLabelCreateSF(dm, lblMigration, PETSC_TRUE, &sfMigration));
1914 PetscCall(DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified));
1915 PetscCall(PetscSFDestroy(&sfMigration));
1916 sfMigration = sfStratified;
1917 PetscCall(PetscSFSetUp(sfMigration));
1918 PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
1919 PetscCall(PetscOptionsHasName(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-partition_view", &flg));
1920 if (flg) {
1921 PetscCall(DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm)));
1922 PetscCall(PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm)));
1923 }
1924
1925 /* Create non-overlapping parallel DM and migrate internal data */
1926 PetscCall(DMPlexCreate(comm, dmParallel));
1927 PetscCall(PetscObjectSetName((PetscObject)*dmParallel, "Parallel Mesh"));
1928 PetscCall(DMPlexMigrate(dm, sfMigration, *dmParallel));
1929
1930 /* Build the point SF without overlap */
1931 PetscCall(DMPlexGetPartitionBalance(dm, &balance));
1932 PetscCall(DMPlexSetPartitionBalance(*dmParallel, balance));
1933 PetscCall(DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint));
1934 PetscCall(DMSetPointSF(*dmParallel, sfPoint));
1935 PetscCall(DMPlexMigrateIsoperiodicFaceSF_Internal(dm, *dmParallel, sfMigration));
1936 PetscCall(DMGetCoordinateDM(*dmParallel, &dmCoord));
1937 if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
1938 if (flg) PetscCall(PetscSFView(sfPoint, NULL));
1939
1940 if (overlap > 0) {
1941 DM dmOverlap;
1942 PetscSF sfOverlap, sfOverlapPoint;
1943
1944 /* Add the partition overlap to the distributed DM */
1945 PetscCall(DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap));
1946 PetscCall(DMDestroy(dmParallel));
1947 *dmParallel = dmOverlap;
1948 if (flg) {
1949 PetscCall(PetscPrintf(comm, "Overlap Migration SF:\n"));
1950 PetscCall(PetscSFView(sfOverlap, NULL));
1951 }
1952
1953 /* Re-map the migration SF to establish the full migration pattern */
1954 PetscCall(DMPlexRemapMigrationSF(sfOverlap, sfMigration, &sfOverlapPoint));
1955 PetscCall(PetscSFDestroy(&sfOverlap));
1956 PetscCall(PetscSFDestroy(&sfMigration));
1957 sfMigration = sfOverlapPoint;
1958 }
1959 /* Cleanup Partition */
1960 PetscCall(DMLabelDestroy(&lblPartition));
1961 PetscCall(DMLabelDestroy(&lblMigration));
1962 PetscCall(PetscSectionDestroy(&cellPartSection));
1963 PetscCall(ISDestroy(&cellPart));
1964 PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmParallel));
1965 // Create sfNatural, need discretization information
1966 PetscCall(DMCopyDisc(dm, *dmParallel));
1967 if (dm->localSection) {
1968 PetscSection psection;
1969 PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &psection));
1970 PetscCall(PetscSFDistributeSection(sfMigration, dm->localSection, NULL, psection));
1971 PetscCall(DMSetLocalSection(*dmParallel, psection));
1972 PetscCall(PetscSectionDestroy(&psection));
1973 }
1974 if (dm->useNatural) {
1975 PetscSection section;
1976
1977 PetscCall(DMSetUseNatural(*dmParallel, PETSC_TRUE));
1978 PetscCall(DMGetLocalSection(dm, &section));
1979
1980 /* If DM has useNatural = PETSC_TRUE, but no sfNatural is set, the DM's Section is considered natural */
1981 /* sfMigration and sfNatural are respectively the point and dofs SFs mapping to this natural DM */
1982 /* Compose with a previous sfNatural if present */
1983 if (dm->sfNatural) {
1984 PetscCall(DMPlexMigrateGlobalToNaturalSF(dm, *dmParallel, dm->sfNatural, sfMigration, &(*dmParallel)->sfNatural));
1985 } else {
1986 PetscCall(DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural));
1987 }
1988 /* Compose with a previous sfMigration if present */
1989 if (dm->sfMigration) {
1990 PetscSF naturalPointSF;
1991
1992 PetscCall(PetscSFCompose(dm->sfMigration, sfMigration, &naturalPointSF));
1993 PetscCall(PetscSFDestroy(&sfMigration));
1994 sfMigration = naturalPointSF;
1995 }
1996 }
1997 /* Cleanup */
1998 if (sf) {
1999 *sf = sfMigration;
2000 } else PetscCall(PetscSFDestroy(&sfMigration));
2001 PetscCall(PetscSFDestroy(&sfPoint));
2002 PetscCall(PetscLogEventEnd(DMPLEX_Distribute, dm, 0, 0, 0));
2003 PetscFunctionReturn(PETSC_SUCCESS);
2004 }
2005
2006 PetscErrorCode DMPlexDistributeOverlap_Internal(DM dm, PetscInt overlap, MPI_Comm newcomm, const char *ovlboundary, PetscSF *sf, DM *dmOverlap)
2007 {
2008 DM_Plex *mesh = (DM_Plex *)dm->data;
2009 MPI_Comm comm;
2010 PetscMPIInt size, rank;
2011 PetscSection rootSection, leafSection;
2012 IS rootrank, leafrank;
2013 DM dmCoord;
2014 DMLabel lblOverlap;
2015 PetscSF sfOverlap, sfStratified, sfPoint;
2016 PetscBool clear_ovlboundary;
2017
2018 PetscFunctionBegin;
2019 if (sf) *sf = NULL;
2020 *dmOverlap = NULL;
2021 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2022 PetscCallMPI(MPI_Comm_size(comm, &size));
2023 PetscCallMPI(MPI_Comm_rank(comm, &rank));
2024 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2025 {
2026 // We need to get options for the _already_ distributed mesh, so it must be done here
2027 PetscInt overlap;
2028 const char *prefix;
2029 char oldPrefix[PETSC_MAX_PATH_LEN];
2030
2031 oldPrefix[0] = '\0';
2032 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
2033 PetscCall(PetscStrncpy(oldPrefix, prefix, sizeof(oldPrefix)));
2034 PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)dm, "dist_"));
2035 PetscCall(DMPlexGetOverlap(dm, &overlap));
2036 PetscObjectOptionsBegin((PetscObject)dm);
2037 PetscCall(DMSetFromOptions_Overlap_Plex(dm, PetscOptionsObject, &overlap));
2038 PetscOptionsEnd();
2039 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)dm, oldPrefix[0] == '\0' ? NULL : oldPrefix));
2040 }
2041 if (ovlboundary) {
2042 PetscBool flg;
2043 PetscCall(DMHasLabel(dm, ovlboundary, &flg));
2044 if (!flg) {
2045 DMLabel label;
2046
2047 PetscCall(DMCreateLabel(dm, ovlboundary));
2048 PetscCall(DMGetLabel(dm, ovlboundary, &label));
2049 PetscCall(DMPlexMarkBoundaryFaces(dm, 1, label));
2050 clear_ovlboundary = PETSC_TRUE;
2051 }
2052 }
2053 PetscCall(PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2054 /* Compute point overlap with neighbouring processes on the distributed DM */
2055 PetscCall(PetscLogEventBegin(DMPLEX_Partition, dm, 0, 0, 0));
2056 PetscCall(PetscSectionCreate(newcomm, &rootSection));
2057 PetscCall(PetscSectionCreate(newcomm, &leafSection));
2058 PetscCall(DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank));
2059 if (mesh->numOvLabels) PetscCall(DMPlexCreateOverlapLabelFromLabels(dm, mesh->numOvLabels, mesh->ovLabels, mesh->ovValues, mesh->numOvExLabels, mesh->ovExLabels, mesh->ovExValues, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
2060 else PetscCall(DMPlexCreateOverlapLabel(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap));
2061 /* Convert overlap label to stratified migration SF */
2062 PetscCall(DMPlexPartitionLabelCreateSF(dm, lblOverlap, PETSC_FALSE, &sfOverlap));
2063 PetscCall(DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified));
2064 PetscCall(PetscSFDestroy(&sfOverlap));
2065 sfOverlap = sfStratified;
2066 PetscCall(PetscObjectSetName((PetscObject)sfOverlap, "Overlap SF"));
2067 PetscCall(PetscSFSetFromOptions(sfOverlap));
2068
2069 PetscCall(PetscSectionDestroy(&rootSection));
2070 PetscCall(PetscSectionDestroy(&leafSection));
2071 PetscCall(ISDestroy(&rootrank));
2072 PetscCall(ISDestroy(&leafrank));
2073 PetscCall(PetscLogEventEnd(DMPLEX_Partition, dm, 0, 0, 0));
2074
2075 /* Build the overlapping DM */
2076 PetscCall(DMPlexCreate(newcomm, dmOverlap));
2077 PetscCall(PetscObjectSetName((PetscObject)*dmOverlap, "Parallel Mesh"));
2078 PetscCall(DMPlexMigrate(dm, sfOverlap, *dmOverlap));
2079 /* Store the overlap in the new DM */
2080 PetscCall(DMPlexSetOverlap_Plex(*dmOverlap, dm, overlap));
2081 /* Build the new point SF */
2082 PetscCall(DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint));
2083 PetscCall(DMSetPointSF(*dmOverlap, sfPoint));
2084 PetscCall(DMGetCoordinateDM(*dmOverlap, &dmCoord));
2085 if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2086 PetscCall(DMGetCellCoordinateDM(*dmOverlap, &dmCoord));
2087 if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2088 PetscCall(PetscSFDestroy(&sfPoint));
2089 PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *dmOverlap));
2090 /* TODO: labels stored inside the DS for regions should be handled here */
2091 PetscCall(DMCopyDisc(dm, *dmOverlap));
2092 /* Cleanup overlap partition */
2093 PetscCall(DMLabelDestroy(&lblOverlap));
2094 if (sf) *sf = sfOverlap;
2095 else PetscCall(PetscSFDestroy(&sfOverlap));
2096 if (ovlboundary) {
2097 DMLabel label;
2098 PetscBool flg;
2099
2100 if (clear_ovlboundary) PetscCall(DMRemoveLabel(dm, ovlboundary, NULL));
2101 PetscCall(DMHasLabel(*dmOverlap, ovlboundary, &flg));
2102 PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing %s label", ovlboundary);
2103 PetscCall(DMGetLabel(*dmOverlap, ovlboundary, &label));
2104 PetscCall(DMPlexMarkBoundaryFaces_Internal(*dmOverlap, 1, 0, label, PETSC_TRUE));
2105 }
2106 PetscCall(PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0));
2107 PetscFunctionReturn(PETSC_SUCCESS);
2108 }
2109
2110 /*@
2111 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping `DM`.
2112
2113 Collective
2114
2115 Input Parameters:
2116 + dm - The non-overlapping distributed `DMPLEX` object
2117 - overlap - The overlap of partitions (the same on all ranks)
2118
2119 Output Parameters:
2120 + sf - The `PetscSF` used for point distribution, or pass `NULL` if not needed
2121 - dmOverlap - The overlapping distributed `DMPLEX` object
2122
2123 Options Database Keys:
2124 + -dm_plex_overlap_labels <name1,name2,...> - List of overlap label names
2125 . -dm_plex_overlap_values <int1,int2,...> - List of overlap label values
2126 . -dm_plex_overlap_exclude_label <name> - Label used to exclude points from overlap
2127 - -dm_plex_overlap_exclude_value <int> - Label value used to exclude points from overlap
2128
2129 Level: advanced
2130
2131 Notes:
2132 If the mesh was not distributed, the output `dmOverlap` will be `NULL`.
2133
2134 The user can control the definition of adjacency for the mesh using `DMSetAdjacency()`. They should choose the combination appropriate for the function
2135 representation on the mesh.
2136
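  Example Usage:
  A minimal sketch; `dm` is assumed to be an already distributed, non-overlapping `DMPLEX`, and `dmOverlap`/`sfOverlap` are illustrative names:
.vb
  DM      dmOverlap = NULL;
  PetscSF sfOverlap = NULL;

  PetscCall(DMPlexDistributeOverlap(dm, 1, &sfOverlap, &dmOverlap));
  if (dmOverlap) {
    PetscCall(DMDestroy(&dm));
    dm = dmOverlap;
  }
  PetscCall(PetscSFDestroy(&sfOverlap));
.ve
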
2137 .seealso: `DMPLEX`, `PetscSF`, `DM`, `DMPlexCreate()`, `DMSetAdjacency()`, `DMPlexDistribute()`, `DMPlexCreateOverlapLabel()`, `DMPlexGetOverlap()`
2138 @*/
2139 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PeOp PetscSF *sf, DM *dmOverlap)
2140 {
2141 PetscFunctionBegin;
2142 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2143 PetscValidLogicalCollectiveInt(dm, overlap, 2);
2144 if (sf) PetscAssertPointer(sf, 3);
2145 PetscAssertPointer(dmOverlap, 4);
2146 PetscCall(DMPlexDistributeOverlap_Internal(dm, overlap, PetscObjectComm((PetscObject)dm), NULL, sf, dmOverlap));
2147 PetscFunctionReturn(PETSC_SUCCESS);
2148 }
2149
2150 PetscErrorCode DMPlexGetOverlap_Plex(DM dm, PetscInt *overlap)
2151 {
2152 DM_Plex *mesh = (DM_Plex *)dm->data;
2153
2154 PetscFunctionBegin;
2155 *overlap = mesh->overlap;
2156 PetscFunctionReturn(PETSC_SUCCESS);
2157 }
2158
2159 PetscErrorCode DMPlexSetOverlap_Plex(DM dm, DM dmSrc, PetscInt overlap)
2160 {
2161 DM_Plex *mesh = NULL;
2162 DM_Plex *meshSrc = NULL;
2163
2164 PetscFunctionBegin;
2165 PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2166 mesh = (DM_Plex *)dm->data;
2167 if (dmSrc) {
2168 PetscValidHeaderSpecificType(dmSrc, DM_CLASSID, 2, DMPLEX);
2169 meshSrc = (DM_Plex *)dmSrc->data;
2170 mesh->overlap = meshSrc->overlap;
2171 } else {
2172 mesh->overlap = 0;
2173 }
2174 mesh->overlap += overlap;
2175 PetscFunctionReturn(PETSC_SUCCESS);
2176 }
2177
2178 /*@
2179 DMPlexGetOverlap - Get the width of the cell overlap
2180
2181 Not Collective
2182
2183 Input Parameter:
2184 . dm - The `DM`
2185
2186 Output Parameter:
2187 . overlap - the width of the cell overlap
2188
2189 Level: intermediate
2190
2191 .seealso: `DMPLEX`, `DMPlexSetOverlap()`, `DMPlexDistribute()`
2192 @*/
2193 PetscErrorCode DMPlexGetOverlap(DM dm, PetscInt *overlap)
2194 {
2195 PetscFunctionBegin;
2196 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2197 PetscAssertPointer(overlap, 2);
2198 PetscUseMethod(dm, "DMPlexGetOverlap_C", (DM, PetscInt *), (dm, overlap));
2199 PetscFunctionReturn(PETSC_SUCCESS);
2200 }
2201
2202 /*@
2203 DMPlexSetOverlap - Set the width of the cell overlap
2204
2205 Logically Collective
2206
2207 Input Parameters:
2208 + dm - The `DM`
2209 . dmSrc - The `DM` that produced this one, or `NULL`
2210 - overlap - the width of the cell overlap
2211
2212 Level: intermediate
2213
2214 Note:
2215 The overlap from `dmSrc` is added to `dm`
2216
2217 .seealso: `DMPLEX`, `DMPlexGetOverlap()`, `DMPlexDistribute()`
2218 @*/
2219 PetscErrorCode DMPlexSetOverlap(DM dm, DM dmSrc, PetscInt overlap)
2220 {
2221 PetscFunctionBegin;
2222 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2223 PetscValidLogicalCollectiveInt(dm, overlap, 3);
2224 PetscTryMethod(dm, "DMPlexSetOverlap_C", (DM, DM, PetscInt), (dm, dmSrc, overlap));
2225 PetscFunctionReturn(PETSC_SUCCESS);
2226 }
2227
2228 PetscErrorCode DMPlexDistributeSetDefault_Plex(DM dm, PetscBool dist)
2229 {
2230 DM_Plex *mesh = (DM_Plex *)dm->data;
2231
2232 PetscFunctionBegin;
2233 mesh->distDefault = dist;
2234 PetscFunctionReturn(PETSC_SUCCESS);
2235 }
2236
2237 /*@
2238 DMPlexDistributeSetDefault - Set flag indicating whether the `DM` should be distributed by default
2239
2240 Logically Collective
2241
2242 Input Parameters:
2243 + dm - The `DM`
2244 - dist - Flag for distribution
2245
2246 Level: intermediate
2247
2248 .seealso: `DMPLEX`, `DMPlexDistributeGetDefault()`, `DMPlexDistribute()`
2249 @*/
2250 PetscErrorCode DMPlexDistributeSetDefault(DM dm, PetscBool dist)
2251 {
2252 PetscFunctionBegin;
2253 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2254 PetscValidLogicalCollectiveBool(dm, dist, 2);
2255 PetscTryMethod(dm, "DMPlexDistributeSetDefault_C", (DM, PetscBool), (dm, dist));
2256 PetscFunctionReturn(PETSC_SUCCESS);
2257 }
2258
2259 PetscErrorCode DMPlexDistributeGetDefault_Plex(DM dm, PetscBool *dist)
2260 {
2261 DM_Plex *mesh = (DM_Plex *)dm->data;
2262
2263 PetscFunctionBegin;
2264 *dist = mesh->distDefault;
2265 PetscFunctionReturn(PETSC_SUCCESS);
2266 }
2267
2268 /*@
2269 DMPlexDistributeGetDefault - Get flag indicating whether the `DM` should be distributed by default
2270
2271 Not Collective
2272
2273 Input Parameter:
2274 . dm - The `DM`
2275
2276 Output Parameter:
2277 . dist - Flag for distribution
2278
2279 Level: intermediate
2280
2281 .seealso: `DMPLEX`, `DM`, `DMPlexDistributeSetDefault()`, `DMPlexDistribute()`
2282 @*/
2283 PetscErrorCode DMPlexDistributeGetDefault(DM dm, PetscBool *dist)
2284 {
2285 PetscFunctionBegin;
2286 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2287 PetscAssertPointer(dist, 2);
2288 PetscUseMethod(dm, "DMPlexDistributeGetDefault_C", (DM, PetscBool *), (dm, dist));
2289 PetscFunctionReturn(PETSC_SUCCESS);
2290 }
2291
2292 /*@
2293 DMPlexGetGatherDM - Get a copy of the `DMPLEX` that gathers all points on the
2294 root process of the original's communicator.
2295
2296 Collective
2297
2298 Input Parameter:
2299 . dm - the original `DMPLEX` object
2300
2301 Output Parameters:
2302 + sf - the `PetscSF` used for point distribution (optional)
2303 - gatherMesh - the gathered `DM` object, or `NULL`
2304
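  Example Usage:
  A minimal sketch (the viewer option name is illustrative); gather the mesh onto the root rank, e.g. for serial output:
.vb
  DM gatherMesh = NULL;

  PetscCall(DMPlexGetGatherDM(dm, NULL, &gatherMesh));
  if (gatherMesh) PetscCall(DMViewFromOptions(gatherMesh, NULL, "-gathered_dm_view"));
  PetscCall(DMDestroy(&gatherMesh));
.ve
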
2305 Level: intermediate
2306
2307 .seealso: `DMPLEX`, `DM`, `PetscSF`, `DMPlexDistribute()`, `DMPlexGetRedundantDM()`
2308 @*/
2309 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, PeOp DM *gatherMesh)
2310 {
2311 MPI_Comm comm;
2312 PetscMPIInt size;
2313 PetscPartitioner oldPart, gatherPart;
2314
2315 PetscFunctionBegin;
2316 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2317 PetscAssertPointer(gatherMesh, 3);
2318 *gatherMesh = NULL;
2319 if (sf) *sf = NULL;
2320 comm = PetscObjectComm((PetscObject)dm);
2321 PetscCallMPI(MPI_Comm_size(comm, &size));
2322 if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
2323 PetscCall(DMPlexGetPartitioner(dm, &oldPart));
2324 PetscCall(PetscObjectReference((PetscObject)oldPart));
2325 PetscCall(PetscPartitionerCreate(comm, &gatherPart));
2326 PetscCall(PetscPartitionerSetType(gatherPart, PETSCPARTITIONERGATHER));
2327 PetscCall(DMPlexSetPartitioner(dm, gatherPart));
2328 PetscCall(DMPlexDistribute(dm, 0, sf, gatherMesh));
2329
2330 PetscCall(DMPlexSetPartitioner(dm, oldPart));
2331 PetscCall(PetscPartitionerDestroy(&gatherPart));
2332 PetscCall(PetscPartitionerDestroy(&oldPart));
2333 PetscFunctionReturn(PETSC_SUCCESS);
2334 }
2335
2336 /*@
2337 DMPlexGetRedundantDM - Get a copy of the `DMPLEX` that is completely copied on each process.
2338
2339 Collective
2340
2341 Input Parameter:
2342 . dm - the original `DMPLEX` object
2343
2344 Output Parameters:
2345 + sf - the `PetscSF` used for point distribution (optional)
2346 - redundantMesh - the redundant `DM` object, or `NULL`
2347
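  Example Usage:
  A minimal sketch (`dmRedundant` is an illustrative name):
.vb
  DM dmRedundant = NULL;

  PetscCall(DMPlexGetRedundantDM(dm, NULL, &dmRedundant));
  if (dmRedundant) {
    /* every rank now holds a full copy of the mesh */
    PetscCall(DMDestroy(&dmRedundant));
  }
.ve
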
2348 Level: intermediate
2349
2350 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetGatherDM()`
2351 @*/
2352 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, PeOp DM *redundantMesh)
2353 {
2354 MPI_Comm comm;
2355 PetscMPIInt size, rank;
2356 PetscInt pStart, pEnd, p;
2357 PetscInt numPoints = -1;
2358 PetscSF migrationSF, sfPoint, gatherSF;
2359 DM gatherDM, dmCoord;
2360 PetscSFNode *points;
2361
2362 PetscFunctionBegin;
2363 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2364 PetscAssertPointer(redundantMesh, 3);
2365 *redundantMesh = NULL;
2366 comm = PetscObjectComm((PetscObject)dm);
2367 PetscCallMPI(MPI_Comm_size(comm, &size));
2368 if (size == 1) {
2369 PetscCall(PetscObjectReference((PetscObject)dm));
2370 *redundantMesh = dm;
2371 if (sf) *sf = NULL;
2372 PetscFunctionReturn(PETSC_SUCCESS);
2373 }
2374 PetscCall(DMPlexGetGatherDM(dm, &gatherSF, &gatherDM));
2375 if (!gatherDM) PetscFunctionReturn(PETSC_SUCCESS);
2376 PetscCallMPI(MPI_Comm_rank(comm, &rank));
2377 PetscCall(DMPlexGetChart(gatherDM, &pStart, &pEnd));
2378 numPoints = pEnd - pStart;
2379 PetscCallMPI(MPI_Bcast(&numPoints, 1, MPIU_INT, 0, comm));
2380 PetscCall(PetscMalloc1(numPoints, &points));
2381 PetscCall(PetscSFCreate(comm, &migrationSF));
2382 for (p = 0; p < numPoints; p++) {
2383 points[p].index = p;
2384 points[p].rank = 0;
2385 }
2386 PetscCall(PetscSFSetGraph(migrationSF, pEnd - pStart, numPoints, NULL, PETSC_OWN_POINTER, points, PETSC_OWN_POINTER));
2387 PetscCall(DMPlexCreate(comm, redundantMesh));
2388 PetscCall(PetscObjectSetName((PetscObject)*redundantMesh, "Redundant Mesh"));
2389 PetscCall(DMPlexMigrate(gatherDM, migrationSF, *redundantMesh));
2390 /* This is to express that all points are in the overlap */
2391 PetscCall(DMPlexSetOverlap_Plex(*redundantMesh, NULL, PETSC_INT_MAX));
2392 PetscCall(DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint));
2393 PetscCall(DMSetPointSF(*redundantMesh, sfPoint));
2394 PetscCall(DMGetCoordinateDM(*redundantMesh, &dmCoord));
2395 if (dmCoord) PetscCall(DMSetPointSF(dmCoord, sfPoint));
2396 PetscCall(PetscSFDestroy(&sfPoint));
2397 if (sf) {
2398 PetscSF tsf;
2399
2400 PetscCall(PetscSFCompose(gatherSF, migrationSF, &tsf));
2401 PetscCall(DMPlexStratifyMigrationSF(dm, tsf, sf));
2402 PetscCall(PetscSFDestroy(&tsf));
2403 }
2404 PetscCall(PetscSFDestroy(&migrationSF));
2405 PetscCall(PetscSFDestroy(&gatherSF));
2406 PetscCall(DMDestroy(&gatherDM));
2407 PetscCall(DMCopyDisc(dm, *redundantMesh));
2408 PetscCall(DMPlexCopy_Internal(dm, PETSC_TRUE, PETSC_FALSE, *redundantMesh));
2409 PetscFunctionReturn(PETSC_SUCCESS);
2410 }
2411
2412 /*@
2413 DMPlexIsDistributed - Find out whether this `DM` is distributed, i.e. more than one rank owns some points.
2414
2415 Collective
2416
2417 Input Parameter:
2418 . dm - The `DM` object
2419
2420 Output Parameter:
2421 . distributed - Flag whether the `DM` is distributed
2422
2423 Level: intermediate
2424
2425 Notes:
2426 This currently finds out whether at least two ranks have any DAG points.
2427 This involves `MPI_Allreduce()` with one integer.
2428 The result is currently not stashed so every call to this routine involves this global communication.
2429
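  Example Usage:
  A minimal sketch (`dmDist` is an illustrative name); distribute only if the mesh is still serial:
.vb
  PetscBool distributed;
  DM        dmDist = NULL;

  PetscCall(DMPlexIsDistributed(dm, &distributed));
  if (!distributed) PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
.ve
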
2430 .seealso: `DMPLEX`, `DMPlexDistribute()`, `DMPlexGetOverlap()`, `DMPlexIsInterpolated()`
2431 @*/
2432 PetscErrorCode DMPlexIsDistributed(DM dm, PetscBool *distributed)
2433 {
2434 PetscInt pStart, pEnd, count;
2435 MPI_Comm comm;
2436 PetscMPIInt size;
2437
2438 PetscFunctionBegin;
2439 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2440 PetscAssertPointer(distributed, 2);
2441 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
2442 PetscCallMPI(MPI_Comm_size(comm, &size));
2443 if (size == 1) {
2444 *distributed = PETSC_FALSE;
2445 PetscFunctionReturn(PETSC_SUCCESS);
2446 }
2447 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
2448 count = (pEnd - pStart) > 0 ? 1 : 0;
2449 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &count, 1, MPIU_INT, MPI_SUM, comm));
2450 *distributed = count > 1 ? PETSC_TRUE : PETSC_FALSE;
2451 PetscFunctionReturn(PETSC_SUCCESS);
2452 }
2453
2454 /*@
2455 DMPlexDistributionSetName - Set the name of the specific parallel distribution
2456
2457 Input Parameters:
2458 + dm - The `DM`
2459 - name - The name of the specific parallel distribution
2460
2461 Level: developer
2462
2463 Note:
2464 If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2465 parallel distribution (i.e., partition, ownership, and local ordering of points) under
2466 this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2467 loads the parallel distribution stored in file under this name.
2468
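  Example Usage:
  A minimal sketch; `viewer` is assumed to be a `PetscViewer` opened for writing, and the distribution name is illustrative:
.vb
  PetscCall(DMPlexDistributionSetName(dm, "my_distribution"));
  PetscCall(DMPlexTopologyView(dm, viewer));
.ve
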
2469 .seealso: `DMPLEX`, `DMPlexDistributionGetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2470 @*/
2471 PetscErrorCode DMPlexDistributionSetName(DM dm, const char name[])
2472 {
2473 DM_Plex *mesh = (DM_Plex *)dm->data;
2474
2475 PetscFunctionBegin;
2476 PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2477 if (name) PetscAssertPointer(name, 2);
2478 PetscCall(PetscFree(mesh->distributionName));
2479 PetscCall(PetscStrallocpy(name, &mesh->distributionName));
2480 PetscFunctionReturn(PETSC_SUCCESS);
2481 }
2482
2483 /*@
2484 DMPlexDistributionGetName - Retrieve the name of the specific parallel distribution
2485
2486 Input Parameter:
2487 . dm - The `DM`
2488
2489 Output Parameter:
2490 . name - The name of the specific parallel distribution
2491
2492 Level: developer
2493
2494 Note:
2495 If distribution name is set when saving, `DMPlexTopologyView()` saves the plex's
2496 parallel distribution (i.e., partition, ownership, and local ordering of points) under
2497 this name. Conversely, if distribution name is set when loading, `DMPlexTopologyLoad()`
2498 loads the parallel distribution stored in file under this name.
2499
2500 .seealso: `DMPLEX`, `DMPlexDistributionSetName()`, `DMPlexTopologyView()`, `DMPlexTopologyLoad()`
2501 @*/
2502 PetscErrorCode DMPlexDistributionGetName(DM dm, const char *name[])
2503 {
2504 DM_Plex *mesh = (DM_Plex *)dm->data;
2505
2506 PetscFunctionBegin;
2507 PetscValidHeaderSpecificType(dm, DM_CLASSID, 1, DMPLEX);
2508 PetscAssertPointer(name, 2);
2509 *name = mesh->distributionName;
2510 PetscFunctionReturn(PETSC_SUCCESS);
2511 }
2512