#include <petsc/private/dmpleximpl.h>  /*I      "petscdmplex.h"   I*/
#include <petsc/private/dmlabelimpl.h> /*I      "petscdmlabel.h"  I*/

/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useCone = useCone;
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useCone, 2);
  *useCone = mesh->useCone;
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm         - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useClosure = useClosure;
  PetscFunctionReturn(0);
}
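
/* Usage sketch (illustrative, not part of the original file): selecting an adjacency
   pattern before DMPlexDistribute() or DMPlexPreallocateOperator(). The flag pairs are
   exactly those listed in the Notes above; dm is assumed to be a DMPlex.

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);   (FEM)
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);    (FVM)
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
*/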

/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$ FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$ FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
$ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useClosure, 2);
  *useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}

/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm         - The DM object
- useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexGetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  for (c = 0; c <= coneSize; ++c) {
    /* c == 0 handles the point p itself, c > 0 its cone points */
    const PetscInt  point   = !c ? p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
      /* Linear duplicate search; the comma expression appends support[s] when it is not yet in adj[] */
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  for (s = 0; s <= supportSize; ++s) {
    /* s == 0 handles the point p itself, s > 0 its support points */
    const PetscInt  point = !s ? p : support[s-1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}
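
/* Orientation note (added commentary): the three helpers above implement the patterns
   listed in the Notes of the setters at the top of this file:
   - DMPlexGetAdjacency_Cone_Internal:    adj(p) = support(p + cone(p))   (FVM)
   - DMPlexGetAdjacency_Support_Internal: adj(p) = cone(p + support(p))   (useCone = useClosure = PETSC_FALSE)
   - DMPlexGetAdjacency_Transitive_Internal, called with useCone = PETSC_FALSE, gives
     closure(star(p)) (FEM); with useCone = PETSC_TRUE it gives star(closure(p)) (FVM++).
   DMPlexGetAdjacency_Internal below dispatches among them based on the mesh flags. */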

PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0; /* cached worst-case adjacency size from the last allocation */
  PetscInt        maxAnchors = 1;
  PetscInt        aStart = -1, aEnd = -1;
  PetscInt        maxAdjSize;
  PetscSection    aSec = NULL;
  IS              aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt  origSize = *adjSize;
    PetscInt  numAdj   = origSize;
    PetscInt  i = 0, j;
    PetscInt *orig     = *adj;

    while (i < origSize) {
      PetscInt p    = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* This point is constrained: remove it and insert its anchors in its place */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      } else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm      - The DM object
. p       - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj     - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj     - The adjacent points

  Level: advanced

  Notes: The user must PetscFree the adj array if it was not passed in.

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
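
/* Usage sketch (illustrative): letting DMPlexGetAdjacency() allocate the array. With
   adj == NULL and adjSize == PETSC_DETERMINE the routine sizes and allocates the array,
   and per the Notes above the caller must free it:

     PetscInt  adjSize = PETSC_DETERMINE;
     PetscInt *adj     = NULL;

     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
     (use adj[0], ..., adj[adjSize-1])
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/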

/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        size, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 7);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
  /* Calculate edges */
  PetscBTClear(neighbors, rank);
  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  for (proc = 0, n = 0; proc < size; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
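
/* Usage sketch (illustrative; this mirrors how the routine feeds DMPlexCreateOverlap()
   below). The caller creates the two sections, and this routine fills them:

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     DMLabel      lblOverlap;

     ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
     ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
*/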

/*@C
  DMPlexCreateOverlap - Compute a label marking, for each remote rank, the points that must be sent to it to build the requested number of overlap levels

  Collective on DM

  Input Parameters:
+ dm          - The DM
. levels      - Number of overlap levels
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. ovLabel - DMLabel containing remote overlap contributions as point/rank pairings

  Level: developer

.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
  PetscMPIInt        rank, size;
  PetscBool          useCone, useClosure, flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  if (useCone || !useClosure) {
    ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Make global process SF and invert sender to receiver label */
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;

    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
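
/* Debugging note (added commentary): DMPlexCreateOverlap() honors -overlap_view, checked
   via PetscOptionsHasName() above, which dumps the per-rank adjacency label ovAdjByRank
   before it is inverted into ovLabel. */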

/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm        - The DM
- overlapSF - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point]        = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point]        = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
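
/* Context sketch (added commentary; names are illustrative): in a distribution driver this
   routine sits between the overlap computation and the actual migration, roughly

     ierr = DMPlexCreateOverlap(dm, levels, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
     (convert ovLabel into overlapSF, e.g. with DMPlexPartitionLabelCreateSF)
     ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
     ierr = DMPlexMigrate(dm, migrationSF, dmOverlap);CHKERRQ(ierr);
*/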

/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.

  Input Parameters:
+ dm - The DM
- sf - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d)   depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d - 1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm              - The DMPlex object
. pointSF         - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec     - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec     - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
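
/* Usage sketch (illustrative): DMPlexDistributeCoordinates() below uses exactly this
   pattern. newSection must already exist (e.g. the target DM's coordinate section) and
   newVec is created empty; both are filled here:

     ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/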

/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm              - The DMPlex object
. pointSF         - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS      - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS      - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm              - The DMPlex object
. pointSF         - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype        - The type of data
- originalData    - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData    - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
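
/* Usage sketch (illustrative): the raw-array variant, here moving one PetscInt per point.
   The routine allocates *newData; the caller frees it:

     PetscInt *newData = NULL;

     ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
     (use newData, then PetscFree(newData))
*/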

static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex       *mesh  = (DM_Plex*) dm->data;
  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm       comm;
  PetscSF        coneSF;
  PetscSection   originalConeSection, newConeSection;
  PetscInt      *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,5);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);

  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;

      ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  if (original) {
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;

    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}
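
/* Debugging note (added commentary): DMPlexDistributeCones() honors -cones_view, checked
   via PetscOptionsHasName() above, printing the serial and parallel cone sections and the
   SF used to move the cones. */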

static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm              comm;
  PetscSection          originalCoordSection, newCoordSection;
  Vec                   originalCoordinates, newCoordinates;
  PetscInt              bs;
  PetscBool             isper;
  const char           *name;
  const PetscReal      *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Here we are assuming that process 0 always has everything */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel   label = NULL, labelNew = NULL;
    PetscBool isdepth;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
    }
    ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isdepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isdepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex       *mesh  = (DM_Plex*) dm->data;
  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool     *isHybrid, *isHybridParallel;
  PetscInt       dim, depth, d;
  PetscInt       pStart, pEnd, pStartP, pEndP;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex       *mesh  = (DM_Plex*) dm->data;
  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm       comm;
  DM             refTree;
  PetscSection   origParentSection, newParentSection;
  PetscInt      *origParents, *origChildIDs;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt  pStart, pEnd;
    PetscInt *newParents, *newChildIDs, *globParents;
    PetscInt *remoteOffsetsParents, newParentSize;
    PetscSF   parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    if (original) {
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    } else {
      globParents = origParents;
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    {
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;

      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
#endif
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex       *mesh  = (DM_Plex*) dm->data;
  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscMPIInt    rank, size;
  MPI_Comm       comm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF - The star forest describing the point overlap in the remapped DM

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);

  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* Point ownership vote: the process with the highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      /* Either put in a bid, or we already know we own it */
      leafNodes[p].rank  = rank;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Roots must not participate in the reduction, flag them so that MAXLOC ignores them */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
  } else {
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank  = rank;
    }
    for (p = 0; p < nleaves; p++) {
      /* Write the new local id into the old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx]  = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
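/* Typical use of DMPlexCreatePointSF, mirroring DMPlexDistribute below: derive
   the point SF from a migration SF and attach it to the migrated DM. This is
   an illustrative sketch with a hypothetical helper name, not part of the
   PETSc API. */
PETSC_UNUSED static PetscErrorCode ExampleAttachPointSF(DM dmParallel, PetscSF migrationSF)
{
  PetscSF        sfPoint;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr); /* the DM holds its own reference */
  PetscFunctionReturn(0);
}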
/*@C
  DMPlexMigrate - Migrates internal DM data over the supplied star forest

  Input Parameters:
+ dm - The source DMPlex object
- sf - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object

  Level: intermediate

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
{
  MPI_Comm               comm;
  PetscInt               dim, nroots;
  PetscSF                sfPoint;
  ISLocalToGlobalMapping ltogMigration;
  ISLocalToGlobalMapping ltogOriginal = NULL;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);

  /* Check for a one-to-all distribution pattern */
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    IS        isOriginal;
    PetscInt  n, size, nleaves;
    PetscInt *numbering_orig, *numbering_new;

    /* Get the original point numbering */
    ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
    /* Convert to positive global numbers */
    for (n = 0; n < size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
    /* Derive the new local-to-global mapping from the old one */
    ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_INT, numbering_orig, numbering_new);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
    ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
  } else {
    /* One-to-all distribution pattern: we can derive the local-to-global mapping from the SF */
    ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
  }
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
  }
  /* Migrate DM data to target DM */
  ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
  ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
  ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
  ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
  ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
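/* Minimal calling sequence for DMPlexMigrate, mirroring its use in
   DMPlexDistribute and DMPlexGetRedundantDM below: the caller creates an
   empty target plex on the same communicator and migrates the source data
   over a given SF. Illustrative sketch only; the helper name is
   hypothetical. */
PETSC_UNUSED static PetscErrorCode ExampleMigrate(DM dm, PetscSF sf, DM *dmTarget)
{
  MPI_Comm       comm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexCreate(comm, dmTarget);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sf, *dmTarget);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}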
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on dm

  Input Parameters:
+ dm - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm         comm;
  PetscPartitioner partitioner;
  IS               cellPart;
  PetscSection     cellPartSection;
  DM               dmCoord;
  DMLabel          lblPartition, lblMigration;
  PetscSF          sfProcess, sfMigration, sfStratified, sfPoint;
  PetscBool        flg;
  PetscMPIInt      rank, size, p;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);

  *dmParallel = NULL;
  if (size == 1) {
    ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* Create cell partition */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
  {
    /* Convert the cell partition to a DMLabel */
    PetscInt        proc, pStart, pEnd, npoints, poffset;
    const PetscInt *points;

    ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
      for (p = poffset; p < poffset+npoints; p++) {
        ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;

    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  /* Stratify the SF in case we are migrating an already parallel plex */
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves;
    PetscSFNode       *newRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;

    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy boundary conditions */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
  }
  /* Cleanup */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
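/* Typical caller-side pattern for DMPlexDistribute: distribute with the
   requested overlap and, if a parallel mesh was produced, replace the serial
   DM with it. Illustrative sketch with a hypothetical helper name, not part
   of the PETSc API. */
PETSC_UNUSED static PetscErrorCode ExampleDistributeAndReplace(DM *dm, PetscInt overlap)
{
  DM             dmDist = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexDistribute(*dm, overlap, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) { /* dmDist is NULL on a single process, where nothing is distributed */
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmDist;
  }
  PetscFunctionReturn(0);
}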
/*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.

  Collective on dm

  Input Parameters:
+ dm - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, dmOverlap is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  DM             dmCoord;
  DMLabel        lblOverlap;
  PetscSF        sfOverlap, sfStratified, sfPoint;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 3);
  PetscValidPointer(dmOverlap, 4);

  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
  ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);

  /* Compute point overlap with neighbouring processes on the distributed DM */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
  /* Convert the overlap label to a stratified migration SF */
  ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  sfOverlap = sfStratified;
  ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  /* Build the overlapping DM */
  ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
  /* Build the new point SF */
  ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  /* Cleanup overlap partition */
  ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
  if (sf) *sf = sfOverlap;
  else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
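/* Adding overlap after the fact: grow one layer of overlap on an already
   distributed mesh and swap it in. Illustrative sketch only; the helper name
   is hypothetical and the overlap depth of 1 is just an example value. */
PETSC_UNUSED static PetscErrorCode ExampleAddOverlap(DM *dmParallel)
{
  DM             dmOverlap = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexDistributeOverlap(*dmParallel, 1, NULL, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) { /* NULL on a single process */
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
  }
  PetscFunctionReturn(0);
}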
/*@C
  DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
  root process of the original's communicator.

  Input Parameter:
. dm - the original DMPlex object

  Output Parameter:
. gatherMesh - the gathered DM object, or NULL

  Level: intermediate

.keywords: mesh
.seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
@*/
PetscErrorCode DMPlexGetGatherDM(DM dm, DM *gatherMesh)
{
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscPartitioner oldPart, gatherPart;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  *gatherMesh = NULL;
  if (size == 1) PetscFunctionReturn(0);
  ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
  ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
  ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
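/* A common use of DMPlexGetGatherDM: pull the whole mesh onto the root
   process, for instance before handing it to a serial library, then release
   the copy. On a single process gatherMesh comes back NULL and there is
   nothing to do. Illustrative sketch only; the helper name is hypothetical. */
PETSC_UNUSED static PetscErrorCode ExampleGatherAndView(DM dm)
{
  DM             gatherMesh = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
  if (gatherMesh) {
    ierr = DMView(gatherMesh, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}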
/*@C
  DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.

  Input Parameter:
. dm - the original DMPlex object

  Output Parameter:
. redundantMesh - the redundant DM object, or NULL

  Level: intermediate

.keywords: mesh
.seealso: DMPlexDistribute(), DMPlexGetGatherDM()
@*/
PetscErrorCode DMPlexGetRedundantDM(DM dm, DM *redundantMesh)
{
  MPI_Comm       comm;
  PetscMPIInt    size, rank;
  PetscInt       pStart, pEnd, p;
  PetscInt       numPoints = -1;
  PetscSF        migrationSF, sfPoint;
  DM             gatherDM, dmCoord;
  PetscSFNode   *points;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  *redundantMesh = NULL;
  if (size == 1) {
    ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
    *redundantMesh = dm;
    PetscFunctionReturn(0);
  }
  ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
  if (!gatherDM) PetscFunctionReturn(0);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
  numPoints = pEnd - pStart;
  ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
  ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
  for (p = 0; p < numPoints; p++) {
    points[p].index = p;
    points[p].rank  = 0;
  }
  ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
  ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
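/* Typical use of DMPlexGetRedundantDM: obtain a full copy of the mesh on every
   process. Note that on a single process the same DM is returned with an extra
   reference, so the copy must still be destroyed. Illustrative sketch only;
   the helper name is hypothetical. */
PETSC_UNUSED static PetscErrorCode ExampleRedundantCopy(DM dm)
{
  DM             dmRedundant = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetRedundantDM(dm, &dmRedundant);CHKERRQ(ierr);
  if (dmRedundant) {
    /* ... use the redundant copy, e.g. for process-local geometric queries ... */
    ierr = DMDestroy(&dmRedundant);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}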